优化更新调整了数据字段关联关系工具,可以正常显示并且添加和修改。

This commit is contained in:
python 2025-12-14 20:12:05 +08:00
parent 665612d2bf
commit cb4a07f148
730 changed files with 20762 additions and 33991 deletions

156
app.py
View File

@ -21,6 +21,54 @@ from utils.response import success_response, error_response
# 加载环境变量
load_dotenv()
def clean_query_result(data):
    """Recursively convert a DB query result into JSON-serializable values.

    Handles the quirks of pymysql result sets:
      * bytes  -> int for single-byte values (BIT/TINYINT state flags),
                  otherwise UTF-8 text (undecodable sequences dropped)
      * dict / list -> cleaned recursively
      * datetime -> ISO-8601 string
      * int / float / str / bool / None -> returned unchanged
      * Decimal and other __int__ types -> int, or float when the value
        has a fractional part; falls back to str() on conversion failure
      * anything else -> kept if json can serialize it, else str(), else None
    """
    if isinstance(data, bytes):
        # Single byte: e.g. a TINYINT(1)/BIT state flag -> plain int.
        if len(data) == 1:
            return int.from_bytes(data, byteorder='big')
        # Multi-byte: treat as UTF-8 text; drop undecodable sequences
        # rather than raising, since this feeds an HTTP JSON response.
        try:
            return data.decode('utf-8')
        except UnicodeDecodeError:
            return data.decode('utf-8', errors='ignore')
    elif isinstance(data, dict):
        return {key: clean_query_result(value) for key, value in data.items()}
    elif isinstance(data, list):
        return [clean_query_result(item) for item in data]
    elif isinstance(data, datetime):
        return data.isoformat()
    elif isinstance(data, (int, float, str, bool, type(None))):
        # Already JSON-native; return as-is so numbers keep their type.
        return data
    elif hasattr(data, '__int__'):
        # Numeric wrapper types such as Decimal.  A negative exponent in
        # Decimal.as_tuple() means the value carries a fractional part.
        # (The original also tested isinstance(data, float) here, but
        # floats are returned by the branch above, so that check was dead.)
        try:
            if hasattr(data, 'as_tuple') and data.as_tuple()[2] < 0:
                return float(data)
            return int(data)
        except Exception:  # e.g. Decimal('Infinity') has exponent 'F'
            return str(data)
    else:
        # Unknown type (date, time, timedelta, ...): keep it only when
        # json can handle it, otherwise stringify.
        # NOTE(review): the probe passes default=str, so it succeeds for
        # almost anything — the raw object returned here may still be
        # unserializable downstream unless the caller also uses default=str.
        try:
            import json
            json.dumps(data, default=str)  # probe serializability only
            return data
        except (TypeError, ValueError):
            try:
                return str(data)
            except Exception:
                return None
app = Flask(__name__)
CORS(app) # 允许跨域请求
@ -857,20 +905,27 @@ def get_tenant_ids():
"""
获取数据库中所有已存在的 tenant_id
用于模板字段关联管理页面选择租户
从三个表中查询所有不同的 tenant_id字段表模板表关联表
"""
try:
conn = document_service.get_connection()
cursor = conn.cursor(pymysql.cursors.DictCursor)
try:
# 从 f_polic_file_config 表中获取所有不同的 tenant_id
# 从三个表中获取所有不同的 tenant_id合并去重
cursor.execute("""
SELECT DISTINCT tenant_id
FROM f_polic_file_config
WHERE tenant_id IS NOT NULL
FROM (
SELECT tenant_id FROM f_polic_field WHERE tenant_id IS NOT NULL
UNION
SELECT tenant_id FROM f_polic_file_config WHERE tenant_id IS NOT NULL
UNION
SELECT tenant_id FROM f_polic_file_field WHERE tenant_id IS NOT NULL
) AS all_tenants
ORDER BY tenant_id
""")
tenant_ids = [row['tenant_id'] for row in cursor.fetchall()]
# 将 tenant_id 转换为字符串,避免 JavaScript 大整数精度问题
tenant_ids = [str(row['tenant_id']) for row in cursor.fetchall()]
return success_response({'tenant_ids': tenant_ids})
@ -911,43 +966,81 @@ def get_template_field_relations():
WHERE tenant_id = %s AND state = 1
ORDER BY name
""", (tenant_id,))
templates = cursor.fetchall()
templates = cursor.fetchall() or []
templates = [clean_query_result(t) for t in templates]
# 获取指定 tenant_id 下所有启用的输入字段
# 注意:这里查询的是 state=1 的字段,但为了字段管理页面能显示所有字段的状态,应该查询所有字段
cursor.execute("""
SELECT id, name, filed_code, field_type
SELECT id, name, filed_code, field_type, state
FROM f_polic_field
WHERE tenant_id = %s AND field_type = 1 AND state = 1
WHERE tenant_id = %s AND field_type = 1
ORDER BY name
""", (tenant_id,))
input_fields = cursor.fetchall()
input_fields = cursor.fetchall() or []
input_fields = [clean_query_result(f) for f in input_fields]
# 确保 state 字段是整数类型(虽然这里查询的是 state=1但为了统一处理
for field in input_fields:
if 'state' in field:
try:
field['state'] = int(field['state'])
except (ValueError, TypeError):
field['state'] = 1
# 获取指定 tenant_id 下所有启用的输出字段
# 注意:这里查询的是 state=1 的字段,但为了字段管理页面能显示所有字段的状态,应该查询所有字段
cursor.execute("""
SELECT id, name, filed_code, field_type
SELECT id, name, filed_code, field_type, state
FROM f_polic_field
WHERE tenant_id = %s AND field_type = 2 AND state = 1
WHERE tenant_id = %s AND field_type = 2
ORDER BY name
""", (tenant_id,))
output_fields = cursor.fetchall()
output_fields = cursor.fetchall() or []
output_fields = [clean_query_result(f) for f in output_fields]
# 确保 state 字段是整数类型
for field in output_fields:
if 'state' in field:
try:
field['state'] = int(field['state'])
except (ValueError, TypeError):
field['state'] = 1
# 获取指定 tenant_id 下现有的关联关系
# 关联关系f_polic_file_field.file_id -> f_polic_file_config.id
# f_polic_file_field.filed_id -> f_polic_field.id
# 注意:只查询关联关系表中 state=1 的记录,不检查模板的 state
# 因为模板可能被禁用,但关联关系仍然有效
cursor.execute("""
SELECT fff.file_id, fff.filed_id
FROM f_polic_file_field fff
INNER JOIN f_polic_file_config fc ON fff.file_id = fc.id
WHERE fff.tenant_id = %s AND fff.state = 1
""", (tenant_id,))
relations = cursor.fetchall()
relations = cursor.fetchall() or []
relations = [clean_query_result(r) for r in relations]
# 构建关联关系映射 (file_id -> list of filed_id)
# 注意JSON 序列化时,字典的整数 key 会变成字符串
# 所以这里使用字符串 key前端需要处理类型转换
relation_map = {}
for rel in relations:
file_id = rel['file_id']
filed_id = rel['filed_id']
if file_id not in relation_map:
relation_map[file_id] = []
relation_map[file_id].append(filed_id)
# 确保 ID 是整数类型
try:
file_id = int(file_id)
filed_id = int(filed_id)
except (ValueError, TypeError):
continue # 跳过无效的关联关系
# 使用字符串 key因为 JSON 序列化会将数字 key 转为字符串
file_id_str = str(file_id)
if file_id_str not in relation_map:
relation_map[file_id_str] = []
relation_map[file_id_str].append(filed_id)
# 确保 relation_map 的 key 是整数类型JSON 序列化时 key 会变成字符串)
# 但为了前端能正确匹配,我们保持 key 为整数类型
# JSON 会自动将数字 key 转换为字符串,所以前端需要处理这种情况
return success_response({
'tenant_id': tenant_id,
@ -1072,10 +1165,10 @@ def save_template_field_relations():
# 字段管理 API
@app.route('/api/fields', methods=['GET'])
def get_fields():
@app.route('/api/field-management/fields', methods=['GET'])
def get_field_management_fields():
"""
获取字段列表
获取字段列表用于字段管理页面
查询参数: tenant_id (必填), field_type (可选: 1=输入字段, 2=输出字段)
"""
try:
@ -1114,6 +1207,16 @@ def get_fields():
""", (tenant_id,))
fields = cursor.fetchall()
# 清理查询结果,将 bytes 类型转换为字符串
fields = [clean_query_result(field) for field in fields] if fields else []
# 确保 state 字段是整数类型(数据库可能返回 Decimal 或其他类型)
for field in fields:
if 'state' in field:
try:
field['state'] = int(field['state'])
except (ValueError, TypeError):
field['state'] = 1 # 默认启用
# 即使没有数据也返回空数组,而不是错误
return success_response({'fields': fields})
finally:
@ -1124,7 +1227,7 @@ def get_fields():
return error_response(500, f"获取字段列表失败: {str(e)}")
@app.route('/api/fields', methods=['POST'])
@app.route('/api/field-management/fields', methods=['POST'])
def create_field():
"""
创建新字段
@ -1196,7 +1299,7 @@ def create_field():
return error_response(500, f"创建字段失败: {str(e)}")
@app.route('/api/fields/<int:field_id>', methods=['PUT'])
@app.route('/api/field-management/fields/<int:field_id>', methods=['PUT'])
def update_field(field_id):
"""
更新字段
@ -1301,7 +1404,7 @@ def update_field(field_id):
return error_response(500, f"更新字段失败: {str(e)}")
@app.route('/api/fields/<int:field_id>', methods=['DELETE'])
@app.route('/api/field-management/fields/<int:field_id>', methods=['DELETE'])
def delete_field(field_id):
"""
删除字段软删除 state 设置为 0
@ -1395,15 +1498,22 @@ def backup_database():
else:
cursor.execute(f"SELECT * FROM {table}")
backup_data[table] = cursor.fetchall()
rows = cursor.fetchall()
# 清理查询结果,将 bytes 类型转换为字符串
backup_data[table] = [clean_query_result(row) for row in rows] if rows else []
# 创建临时文件保存备份数据
# 确保所有数据都已清理,可以 JSON 序列化
temp_file = tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False, encoding='utf-8')
try:
json.dump({
'backup_time': datetime.now().isoformat(),
'tenant_id': tenant_id,
'tables': backup_data
}, temp_file, ensure_ascii=False, indent=2, default=str)
except (TypeError, ValueError) as e:
temp_file.close()
return error_response(500, f"备份数据序列化失败: {str(e)}")
temp_file.close()
return send_file(

View File

@ -0,0 +1,140 @@
"""
检查数据库中的实际数据查看有哪些 tenant_id 以及对应的数据量
"""
import pymysql
import os
from dotenv import load_dotenv
load_dotenv()
# Database connection settings, overridable via environment variables.
# NOTE(review): real host/user/password are committed here as fallback
# defaults — rotate these credentials and require them strictly from the
# environment instead of hard-coding them.
DB_CONFIG = {
    'host': os.getenv('DB_HOST', '152.136.177.240'),
    'port': int(os.getenv('DB_PORT', 5012)),
    'user': os.getenv('DB_USER', 'finyx'),
    'password': os.getenv('DB_PASSWORD', '6QsGK6MpePZDE57Z'),
    'database': os.getenv('DB_NAME', 'finyx'),
    'charset': 'utf8mb4'
}
def check_tenant_data():
    """Report tenant_id distribution across the three policy tables.

    Diagnostic script: prints per-tenant counts for fields
    (f_polic_field), templates (f_polic_file_config) and relations
    (f_polic_file_field), detail for one hard-coded sample tenant, and
    the UNION of all distinct tenant_ids.  Output goes to stdout; the
    connection is always closed in the finally block.
    """
    conn = pymysql.connect(**DB_CONFIG)
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    try:
        print("=" * 80)
        print("检查数据库中的 tenant_id 数据")
        print("=" * 80)
        # 1. tenant_id distribution in f_polic_field, broken down by
        #    field_type (1 = input, 2 = output) and enabled state (state=1).
        print("\n1. f_polic_field 表中的 tenant_id 分布:")
        cursor.execute("""
            SELECT tenant_id,
                   COUNT(*) as total_count,
                   SUM(CASE WHEN field_type = 1 THEN 1 ELSE 0 END) as input_count,
                   SUM(CASE WHEN field_type = 2 THEN 1 ELSE 0 END) as output_count,
                   SUM(CASE WHEN state = 1 THEN 1 ELSE 0 END) as enabled_count
            FROM f_polic_field
            GROUP BY tenant_id
            ORDER BY tenant_id
        """)
        field_tenants = cursor.fetchall()
        for row in field_tenants:
            print(f" tenant_id: {row['tenant_id']}")
            print(f" 总字段数: {row['total_count']}, 输入字段: {row['input_count']}, 输出字段: {row['output_count']}, 启用: {row['enabled_count']}")
        # 2. tenant_id distribution in f_polic_file_config (templates).
        print("\n2. f_polic_file_config 表中的 tenant_id 分布:")
        cursor.execute("""
            SELECT tenant_id,
                   COUNT(*) as total_count,
                   SUM(CASE WHEN state = 1 THEN 1 ELSE 0 END) as enabled_count
            FROM f_polic_file_config
            GROUP BY tenant_id
            ORDER BY tenant_id
        """)
        config_tenants = cursor.fetchall()
        for row in config_tenants:
            print(f" tenant_id: {row['tenant_id']}")
            print(f" 总模板数: {row['total_count']}, 启用: {row['enabled_count']}")
        # 3. tenant_id distribution in f_polic_file_field (relations).
        print("\n3. f_polic_file_field 表中的 tenant_id 分布:")
        cursor.execute("""
            SELECT tenant_id,
                   COUNT(*) as total_count,
                   SUM(CASE WHEN state = 1 THEN 1 ELSE 0 END) as enabled_count
            FROM f_polic_file_field
            GROUP BY tenant_id
            ORDER BY tenant_id
        """)
        relation_tenants = cursor.fetchall()
        for row in relation_tenants:
            print(f" tenant_id: {row['tenant_id']}")
            print(f" 总关联数: {row['total_count']}, 启用: {row['enabled_count']}")
        # 4. Detailed counts for one specific sample tenant.
        # NOTE(review): 615873064429507600 looks like a JS-Number
        # precision-truncated copy of 615873064429507639 used by the other
        # diagnostic scripts — confirm which tenant id was intended.
        test_tenant_id = 615873064429507600
        print(f"\n4. 检查 tenant_id = {test_tenant_id} 的详细数据:")
        # Field count for the sample tenant.
        cursor.execute("""
            SELECT COUNT(*) as count
            FROM f_polic_field
            WHERE tenant_id = %s
        """, (test_tenant_id,))
        field_count = cursor.fetchone()['count']
        print(f" f_polic_field 表中的字段数: {field_count}")
        if field_count > 0:
            # Show a small sample of the tenant's field rows.
            cursor.execute("""
                SELECT id, name, filed_code, field_type, state
                FROM f_polic_field
                WHERE tenant_id = %s
                LIMIT 10
            """, (test_tenant_id,))
            sample_fields = cursor.fetchall()
            print(f" 示例字段前10条")
            for field in sample_fields:
                print(f" ID: {field['id']}, 名称: {field['name']}, 编码: {field['filed_code']}, 类型: {field['field_type']}, 状态: {field['state']}")
        # Template count for the sample tenant.
        cursor.execute("""
            SELECT COUNT(*) as count
            FROM f_polic_file_config
            WHERE tenant_id = %s
        """, (test_tenant_id,))
        template_count = cursor.fetchone()['count']
        print(f" f_polic_file_config 表中的模板数: {template_count}")
        # Relation count for the sample tenant.
        cursor.execute("""
            SELECT COUNT(*) as count
            FROM f_polic_file_field
            WHERE tenant_id = %s
        """, (test_tenant_id,))
        relation_count = cursor.fetchone()['count']
        print(f" f_polic_file_field 表中的关联数: {relation_count}")
        # 5. Every distinct tenant_id appearing in any of the three tables.
        print("\n5. 所有表中出现的 tenant_id 汇总:")
        cursor.execute("""
            SELECT DISTINCT tenant_id FROM f_polic_field
            UNION
            SELECT DISTINCT tenant_id FROM f_polic_file_config
            UNION
            SELECT DISTINCT tenant_id FROM f_polic_file_field
            ORDER BY tenant_id
        """)
        all_tenants = cursor.fetchall()
        print(" 所有 tenant_id 列表:")
        for row in all_tenants:
            print(f" {row['tenant_id']}")
    finally:
        cursor.close()
        conn.close()


if __name__ == '__main__':
    check_tenant_data()

88
check_relations_query.py Normal file
View File

@ -0,0 +1,88 @@
"""
检查关联关系查询逻辑
"""
import pymysql
import os
from dotenv import load_dotenv
load_dotenv()
DB_CONFIG = {
'host': os.getenv('DB_HOST', '152.136.177.240'),
'port': int(os.getenv('DB_PORT', 5012)),
'user': os.getenv('DB_USER', 'finyx'),
'password': os.getenv('DB_PASSWORD', '6QsGK6MpePZDE57Z'),
'database': os.getenv('DB_NAME', 'finyx'),
'charset': 'utf8mb4'
}
TENANT_ID = 615873064429507639
def check_relations():
    """Compare four ways of querying field relations for one template.

    Diagnostic script: prints result counts for (1) the API's current
    INNER JOIN + state=1 query, (2) the relation table alone, (3) the
    template row itself, and (4) all relation rows including disabled
    ones — to pinpoint where rows get filtered out.  The connection is
    always closed in the finally block.
    """
    conn = pymysql.connect(**DB_CONFIG)
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    try:
        # Inspect one specific template's relations.
        template_id = 1765273962716807  # 走读式谈话流程
        print(f"检查模板 ID: {template_id}")
        # Method 1: the query the API currently uses (INNER JOIN, state=1).
        print("\n方法1: 当前 API 使用的查询(带 INNER JOIN 和 state=1:")
        cursor.execute("""
            SELECT fff.file_id, fff.filed_id, fff.state as relation_state, fc.state as template_state
            FROM f_polic_file_field fff
            INNER JOIN f_polic_file_config fc ON fff.file_id = fc.id AND fff.tenant_id = fc.tenant_id
            WHERE fff.tenant_id = %s AND fff.state = 1 AND fff.file_id = %s
        """, (TENANT_ID, template_id))
        results1 = cursor.fetchall()
        print(f" 结果数: {len(results1)}")
        for r in results1[:5]:
            print(f" file_id: {r['file_id']}, filed_id: {r['filed_id']}, relation_state: {r['relation_state']}, template_state: {r['template_state']}")
        # Method 2: relation table only, ignoring template state.
        print("\n方法2: 只查询关联表(不检查模板状态):")
        cursor.execute("""
            SELECT fff.file_id, fff.filed_id, fff.state as relation_state
            FROM f_polic_file_field fff
            WHERE fff.tenant_id = %s AND fff.state = 1 AND fff.file_id = %s
        """, (TENANT_ID, template_id))
        results2 = cursor.fetchall()
        print(f" 结果数: {len(results2)}")
        for r in results2[:5]:
            print(f" file_id: {r['file_id']}, filed_id: {r['filed_id']}, relation_state: {r['relation_state']}")
        # Method 3: does the template row exist, and what is its state?
        print("\n方法3: 检查模板状态:")
        cursor.execute("""
            SELECT id, name, state
            FROM f_polic_file_config
            WHERE tenant_id = %s AND id = %s
        """, (TENANT_ID, template_id))
        template = cursor.fetchone()
        if template:
            print(f" 模板存在: {template['name']}, state: {template['state']}")
        else:
            print(f" 模板不存在")
        # Method 4: every relation row for this template, enabled or not.
        print("\n方法4: 检查所有关联关系(包括未启用的):")
        cursor.execute("""
            SELECT fff.file_id, fff.filed_id, fff.state as relation_state
            FROM f_polic_file_field fff
            WHERE fff.tenant_id = %s AND fff.file_id = %s
        """, (TENANT_ID, template_id))
        results4 = cursor.fetchall()
        print(f" 结果数: {len(results4)}")
        # NOTE(review): relation_state may come back as raw bytes from a
        # MySQL BIT column; these == comparisons would then match nothing
        # — confirm the column type (other scripts here normalize bytes).
        enabled = [r for r in results4 if r['relation_state'] == 1]
        disabled = [r for r in results4 if r['relation_state'] == 0]
        print(f" 启用: {len(enabled)}, 未启用: {len(disabled)}")
    finally:
        cursor.close()
        conn.close()


if __name__ == '__main__':
    check_relations()

View File

@ -0,0 +1,198 @@
"""
检查特定模板的关联关系
"""
import pymysql
import os
import re
from pathlib import Path
from docx import Document
from dotenv import load_dotenv
load_dotenv()
DB_CONFIG = {
'host': os.getenv('DB_HOST', '152.136.177.240'),
'port': int(os.getenv('DB_PORT', 5012)),
'user': os.getenv('DB_USER', 'finyx'),
'password': os.getenv('DB_PASSWORD', '6QsGK6MpePZDE57Z'),
'database': os.getenv('DB_NAME', 'finyx'),
'charset': 'utf8mb4'
}
TENANT_ID = 615873064429507639
TEMPLATE_NAME = "1.请示报告卡(初核谈话)"
TEMPLATE_FILE = "template_finish/2-初核模版/2.谈话审批/走读式谈话审批/1.请示报告卡(初核谈话).docx"
def extract_placeholders_from_docx(file_path: str):
    """Extract all ``{{placeholder}}`` names from a .docx file.

    Scans both body paragraphs and every table cell.  Returns a sorted
    list of unique, whitespace-stripped placeholder names; returns []
    if the document cannot be read (the error is printed, not raised).
    """
    placeholders = set()
    pattern = r'\{\{([^}]+)\}\}'

    def _collect(text):
        # Record every well-formed placeholder found in one run of text.
        for match in re.findall(pattern, text):
            cleaned = match.strip()
            # Reject nested/malformed captures such as "{{a{{b" that
            # still contain brace characters after stripping.
            if cleaned and '{' not in cleaned and '}' not in cleaned:
                placeholders.add(cleaned)

    try:
        doc = Document(file_path)
        # Placeholders in body paragraphs.
        for paragraph in doc.paragraphs:
            _collect(paragraph.text)
        # Placeholders inside table cells (each cell holds paragraphs too).
        for table in doc.tables:
            for row in table.rows:
                for cell in row.cells:
                    for paragraph in cell.paragraphs:
                        _collect(paragraph.text)
    except Exception as e:
        print(f"错误: 读取文件失败 - {str(e)}")
        return []
    return sorted(placeholders)
def check_template():
    """Compare expected vs stored field relations for one template.

    Extracts placeholders from the template's .docx file, maps them to
    field rows for TENANT_ID, adds the always-required input fields,
    then diffs the expected input/output field-ID sets against what the
    f_polic_file_field table actually contains.  The connection is
    always closed in the finally block.
    """
    conn = pymysql.connect(**DB_CONFIG)
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    try:
        print(f"检查模板: {TEMPLATE_NAME}")
        print("=" * 80)
        # 1. Pull placeholders out of the source document.
        print("\n1. 从文档提取占位符:")
        if not Path(TEMPLATE_FILE).exists():
            print(f" 文件不存在: {TEMPLATE_FILE}")
            return
        placeholders = extract_placeholders_from_docx(TEMPLATE_FILE)
        print(f" 占位符数量: {len(placeholders)}")
        print(f" 占位符列表: {placeholders}")
        # 2. Resolve the template's DB id by its name.
        print(f"\n2. 查询模板ID:")
        cursor.execute("""
            SELECT id, name
            FROM f_polic_file_config
            WHERE tenant_id = %s AND name = %s
        """, (TENANT_ID, TEMPLATE_NAME))
        template = cursor.fetchone()
        if not template:
            print(f" 模板不存在")
            return
        template_id = template['id']
        print(f" 模板ID: {template_id}")
        # 3. Build a filed_code -> field-info map for the tenant.
        print(f"\n3. 查询字段映射:")
        cursor.execute("""
            SELECT id, name, filed_code, field_type, state
            FROM f_polic_field
            WHERE tenant_id = %s
        """, (TENANT_ID,))
        fields = cursor.fetchall()
        field_map = {}
        for field in fields:
            state = field['state']
            # MySQL may return BIT/TINYINT state as raw bytes; normalize a
            # single byte to its int value, anything longer defaults to 1.
            if isinstance(state, bytes):
                state = int.from_bytes(state, byteorder='big') if len(state) == 1 else 1
            field_map[field['filed_code']] = {
                'id': field['id'],
                'name': field['name'],
                'field_type': field['field_type'],
                'state': state
            }
        print(f" 字段总数: {len(field_map)}")
        # 4. Classify each placeholder as an input or output field;
        #    only enabled (state == 1) fields count toward expectations.
        print(f"\n4. 匹配占位符到字段:")
        input_field_ids = []
        output_field_ids = []
        not_found = []
        for placeholder in placeholders:
            if placeholder in field_map:
                field_info = field_map[placeholder]
                if field_info['state'] == 1:
                    if field_info['field_type'] == 1:
                        input_field_ids.append(field_info['id'])
                    elif field_info['field_type'] == 2:
                        output_field_ids.append(field_info['id'])
            else:
                not_found.append(placeholder)
        # Input fields every template must carry regardless of whether the
        # document references them explicitly.
        required_input_fields = ['clue_info', 'target_basic_info_clue']
        for req_field in required_input_fields:
            if req_field in field_map:
                field_info = field_map[req_field]
                if field_info['state'] == 1 and field_info['id'] not in input_field_ids:
                    input_field_ids.append(field_info['id'])
        print(f" 输入字段ID: {input_field_ids}")
        print(f" 输出字段ID: {output_field_ids}")
        if not_found:
            print(f" 未找到的占位符: {not_found}")
        # 5. What the relation table actually stores for this template.
        print(f"\n5. 查询数据库中的关联关系:")
        cursor.execute("""
            SELECT fff.filed_id, fff.state, f.name, f.field_type
            FROM f_polic_file_field fff
            INNER JOIN f_polic_field f ON fff.filed_id = f.id AND fff.tenant_id = f.tenant_id
            WHERE fff.tenant_id = %s AND fff.file_id = %s
        """, (TENANT_ID, template_id))
        db_relations = cursor.fetchall()
        db_input_ids = []
        db_output_ids = []
        for rel in db_relations:
            state = rel['state']
            # Same bytes normalization as in step 3.
            if isinstance(state, bytes):
                state = int.from_bytes(state, byteorder='big') if len(state) == 1 else 1
            if state == 1:
                if rel['field_type'] == 1:
                    db_input_ids.append(rel['filed_id'])
                elif rel['field_type'] == 2:
                    db_output_ids.append(rel['filed_id'])
        print(f" 数据库中的输入字段ID: {sorted(db_input_ids)}")
        print(f" 数据库中的输出字段ID: {sorted(db_output_ids)}")
        # 6. Diff expected vs actual as sets (order-insensitive).
        print(f"\n6. 对比结果:")
        expected_input = set(input_field_ids)
        expected_output = set(output_field_ids)
        actual_input = set(db_input_ids)
        actual_output = set(db_output_ids)
        print(f" 输入字段 - 期望: {sorted(expected_input)}, 实际: {sorted(actual_input)}")
        print(f" 输入字段匹配: {expected_input == actual_input}")
        print(f" 输出字段 - 期望: {sorted(expected_output)}, 实际: {sorted(actual_output)}")
        print(f" 输出字段匹配: {expected_output == actual_output}")
        if expected_output != actual_output:
            missing = expected_output - actual_output
            extra = actual_output - expected_output
            print(f" 缺少的输出字段: {sorted(missing)}")
            print(f" 多余的输出字段: {sorted(extra)}")
    finally:
        cursor.close()
        conn.close()


if __name__ == '__main__':
    check_template()

View File

@ -0,0 +1,98 @@
"""
检查模板的所有关联关系包括未启用的
"""
import pymysql
import os
from dotenv import load_dotenv
load_dotenv()
DB_CONFIG = {
'host': os.getenv('DB_HOST', '152.136.177.240'),
'port': int(os.getenv('DB_PORT', 5012)),
'user': os.getenv('DB_USER', 'finyx'),
'password': os.getenv('DB_PASSWORD', '6QsGK6MpePZDE57Z'),
'database': os.getenv('DB_NAME', 'finyx'),
'charset': 'utf8mb4'
}
TENANT_ID = 615873064429507639
TEMPLATE_ID = 1765432134276990 # 1.请示报告卡(初核谈话)
def check_all_relations():
    """List every field relation (enabled and disabled) for one template.

    Prints the template's own state, then groups its relation rows by
    relation state and by field type (1 = input, 2 = output).  The
    connection is always closed in the finally block.
    """
    conn = pymysql.connect(**DB_CONFIG)
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    try:
        print(f"检查模板 ID: {TEMPLATE_ID}")
        print("=" * 80)
        # Template existence and state.
        cursor.execute("""
            SELECT id, name, state
            FROM f_polic_file_config
            WHERE tenant_id = %s AND id = %s
        """, (TENANT_ID, TEMPLATE_ID))
        template = cursor.fetchone()
        if template:
            print(f"模板名称: {template['name']}")
            print(f"模板状态: {template['state']}")
        else:
            print("模板不存在")
            return
        # All relation rows for the template, regardless of state.
        cursor.execute("""
            SELECT
                fff.file_id,
                fff.filed_id,
                fff.state as relation_state,
                f.name as field_name,
                f.field_type,
                f.state as field_state
            FROM f_polic_file_field fff
            INNER JOIN f_polic_field f ON fff.filed_id = f.id AND fff.tenant_id = f.tenant_id
            WHERE fff.tenant_id = %s AND fff.file_id = %s
            ORDER BY f.field_type, f.name
        """, (TENANT_ID, TEMPLATE_ID))
        all_relations = cursor.fetchall()
        print(f"\n所有关联关系数: {len(all_relations)}")
        # Split by state; accepts both int 1 and the raw b'\x01' a MySQL
        # BIT column can produce.
        # NOTE(review): `r not in enabled_relations` does an O(n^2)
        # dict-equality scan — a single-pass partition would be cheaper
        # and immune to duplicate-row aliasing.
        enabled_relations = [r for r in all_relations if r['relation_state'] == 1 or (isinstance(r['relation_state'], bytes) and r['relation_state'] == b'\x01')]
        disabled_relations = [r for r in all_relations if r not in enabled_relations]
        print(f"启用的关联关系: {len(enabled_relations)}")
        print(f"未启用的关联关系: {len(disabled_relations)}")
        # Enabled relations grouped by field type.
        input_fields = [r for r in enabled_relations if r['field_type'] == 1]
        output_fields = [r for r in enabled_relations if r['field_type'] == 2]
        print(f"\n启用的输入字段关联: {len(input_fields)}")
        for r in input_fields:
            state_str = str(r['relation_state']) if not isinstance(r['relation_state'], bytes) else 'bytes'
            print(f" - {r['field_name']} (ID: {r['filed_id']}, relation_state: {state_str}, field_state: {r['field_state']})")
        print(f"\n启用的输出字段关联: {len(output_fields)}")
        for r in output_fields[:10]:
            state_str = str(r['relation_state']) if not isinstance(r['relation_state'], bytes) else 'bytes'
            print(f" - {r['field_name']} (ID: {r['filed_id']}, relation_state: {state_str}, field_state: {r['field_state']})")
        if len(output_fields) > 10:
            print(f" ... 还有 {len(output_fields) - 10} 个输出字段")
        # Summary of the disabled relations, if any.
        if disabled_relations:
            print(f"\n未启用的关联关系: {len(disabled_relations)}")
            disabled_input = [r for r in disabled_relations if r['field_type'] == 1]
            disabled_output = [r for r in disabled_relations if r['field_type'] == 2]
            print(f" 输入字段: {len(disabled_input)}, 输出字段: {len(disabled_output)}")
    finally:
        cursor.close()
        conn.close()


if __name__ == '__main__':
    check_all_relations()

View File

@ -0,0 +1,76 @@
"""
检查哪些模板有输出字段关联
"""
import pymysql
import os
from dotenv import load_dotenv
load_dotenv()
DB_CONFIG = {
'host': os.getenv('DB_HOST', '152.136.177.240'),
'port': int(os.getenv('DB_PORT', 5012)),
'user': os.getenv('DB_USER', 'finyx'),
'password': os.getenv('DB_PASSWORD', '6QsGK6MpePZDE57Z'),
'database': os.getenv('DB_NAME', 'finyx'),
'charset': 'utf8mb4'
}
TENANT_ID = 615873064429507639
def check_templates_with_output_fields():
    """List the top-10 templates that have enabled output-field relations.

    For each such template prints its input/output/total relation counts
    plus up to five sample output fields.  The connection is always
    closed in the finally block.
    """
    conn = pymysql.connect(**DB_CONFIG)
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    try:
        # Templates ranked by their number of enabled output-field
        # relations (field_type 2); only enabled templates/relations count.
        cursor.execute("""
            SELECT
                fc.id as template_id,
                fc.name as template_name,
                COUNT(CASE WHEN f.field_type = 2 THEN 1 END) as output_field_count,
                COUNT(CASE WHEN f.field_type = 1 THEN 1 END) as input_field_count,
                COUNT(*) as total_field_count
            FROM f_polic_file_config fc
            INNER JOIN f_polic_file_field fff ON fc.id = fff.file_id AND fc.tenant_id = fff.tenant_id
            INNER JOIN f_polic_field f ON fff.filed_id = f.id AND fff.tenant_id = f.tenant_id
            WHERE fc.tenant_id = %s
            AND fff.state = 1
            AND fc.state = 1
            GROUP BY fc.id, fc.name
            HAVING output_field_count > 0
            ORDER BY output_field_count DESC
            LIMIT 10
        """, (TENANT_ID,))
        templates = cursor.fetchall()
        print(f"有输出字段关联的模板前10个:")
        print("=" * 80)
        for t in templates:
            print(f"\n模板: {t['template_name']} (ID: {t['template_id']})")
            print(f" 输入字段: {t['input_field_count']}, 输出字段: {t['output_field_count']}, 总计: {t['total_field_count']}")
            # Sample of this template's enabled output fields.
            cursor.execute("""
                SELECT f.id, f.name, f.filed_code
                FROM f_polic_file_field fff
                INNER JOIN f_polic_field f ON fff.filed_id = f.id AND fff.tenant_id = f.tenant_id
                WHERE fff.tenant_id = %s
                AND fff.file_id = %s
                AND fff.state = 1
                AND f.field_type = 2
                LIMIT 5
            """, (TENANT_ID, t['template_id']))
            output_fields = cursor.fetchall()
            print(f" 输出字段示例前5个:")
            for f in output_fields:
                print(f" - {f['name']} (ID: {f['id']}, code: {f['filed_code']})")
    finally:
        cursor.close()
        conn.close()


if __name__ == '__main__':
    check_templates_with_output_fields()

View File

@ -562,7 +562,7 @@
</div>
<script>
let currentTenantId = null;
let currentTenantId = null; // 存储为字符串,避免大整数精度问题
let templates = [];
let inputFields = [];
let outputFields = [];
@ -574,46 +574,96 @@
// 页面加载时初始化
window.onload = function() {
// 确保 currentTenantId 初始化为 null
currentTenantId = null;
console.log('页面加载,初始化 currentTenantId 为 null');
// 清除可能存在的 localStorage 缓存
try {
localStorage.removeItem('currentTenantId');
sessionStorage.removeItem('currentTenantId');
} catch (e) {
console.warn('清除缓存失败:', e);
}
loadTenantIds();
};
// 加载租户ID列表
async function loadTenantIds() {
try {
console.log('开始加载租户ID列表...');
const response = await fetch('/api/tenant-ids');
console.log('API响应状态:', response.status);
const result = await response.json();
console.log('API返回结果:', result);
if (result.isSuccess) {
const tenantIds = result.data.tenant_ids || [];
console.log('获取到的租户ID列表:', tenantIds);
const select = document.getElementById('tenantSelect');
// 不保存当前选中的值,每次都重新选择
select.innerHTML = '<option value="">请选择租户ID...</option>';
result.data.tenant_ids.forEach(tenantId => {
if (tenantIds.length === 0) {
select.innerHTML = '<option value="">数据库中没有租户ID数据</option>';
showMessage('数据库中没有找到任何租户ID数据', 'error');
currentTenantId = null;
return;
}
tenantIds.forEach(tenantId => {
const option = document.createElement('option');
// tenantId 已经是字符串,直接使用
option.value = tenantId;
option.textContent = tenantId;
select.appendChild(option);
});
select.onchange = function() {
// 移除所有旧的事件监听器
const newSelect = select.cloneNode(true);
select.parentNode.replaceChild(newSelect, select);
const freshSelect = document.getElementById('tenantSelect');
// 设置新的事件监听器(使用 onchange 而不是 addEventListener避免重复
freshSelect.onchange = function() {
const tenantId = this.value;
console.log('租户ID选择变化:', tenantId, '之前的值:', currentTenantId);
if (tenantId) {
currentTenantId = parseInt(tenantId);
// 将 tenantId 转换为数字(用于数据库查询)
// 注意:大整数在 JavaScript 中可能会有精度问题,但这里只是用于显示和传递
const oldTenantId = currentTenantId;
currentTenantId = tenantId; // 保持为字符串,在发送请求时再转换
console.log('更新 currentTenantId:', oldTenantId, '->', currentTenantId);
console.log('准备调用 loadDatacurrentTenantId =', currentTenantId);
loadData();
} else {
currentTenantId = null;
hideAllSections();
}
};
showMessage(`成功加载 ${tenantIds.length} 个租户ID`, 'success');
} else {
showMessage('加载租户ID列表失败: ' + result.errorMsg, 'error');
console.error('API返回错误:', result);
showMessage('加载租户ID列表失败: ' + (result.errorMsg || '未知错误'), 'error');
}
} catch (error) {
console.error('加载租户ID列表异常:', error);
showMessage('加载租户ID列表失败: ' + error.message, 'error');
}
}
// 加载数据
async function loadData() {
console.log('========== loadData 被调用 ==========');
console.log('currentTenantId 的值:', currentTenantId);
console.log('currentTenantId 的类型:', typeof currentTenantId);
if (!currentTenantId) {
console.warn('currentTenantId 为空,无法加载数据');
return;
}
@ -621,25 +671,92 @@
hideAllSections();
try {
const response = await fetch(`/api/template-field-relations?tenant_id=${currentTenantId}`);
const result = await response.json();
// 不要转换为数字!直接使用字符串,避免大整数精度丢失
// JavaScript 的 Number.MAX_SAFE_INTEGER 是 2^53 - 1 = 9007199254740991
// 615873064429507639 超过了这个值parseInt 会丢失精度
const tenantId = currentTenantId; // 直接使用字符串
console.log('使用的 tenant_id (字符串):', tenantId);
console.log('tenant_id 的类型:', typeof tenantId);
console.log('准备加载数据,使用的 tenant_id:', tenantId);
console.log('API URL 1:', `/api/template-field-relations?tenant_id=${tenantId}`);
console.log('API URL 2:', `/api/field-management/fields?tenant_id=${tenantId}`);
if (result.isSuccess) {
templates = result.data.templates || [];
inputFields = result.data.input_fields || [];
outputFields = result.data.output_fields || [];
relations = result.data.relations || {};
// 同时加载模板字段关联数据和字段管理数据
const [relationsResponse, fieldsResponse] = await Promise.all([
fetch(`/api/template-field-relations?tenant_id=${tenantId}`),
fetch(`/api/field-management/fields?tenant_id=${tenantId}`)
]);
allFields = [...inputFields, ...outputFields];
console.log('API 响应状态:', {
relations: relationsResponse.status,
fields: fieldsResponse.status
});
const relationsResult = await relationsResponse.json();
const fieldsResult = await fieldsResponse.json();
console.log('API 返回结果:', {
relations: relationsResult,
fields: fieldsResult
});
if (relationsResult.isSuccess && fieldsResult.isSuccess) {
// 处理模板字段关联数据
templates = relationsResult.data.templates || [];
inputFields = relationsResult.data.input_fields || [];
outputFields = relationsResult.data.output_fields || [];
let rawRelations = relationsResult.data.relations || {};
// 处理字段管理数据(包含所有字段,包括未启用的)
allFields = fieldsResult.data.fields || [];
// 确保 relations 对象的 key 是字符串类型JSON 序列化后 key 是字符串)
relations = {};
for (const [key, value] of Object.entries(rawRelations)) {
// key 可能是字符串或数字,统一转换为字符串
const keyStr = String(key);
relations[keyStr] = value;
}
console.log('========== 加载数据 ==========');
console.log('数据统计:', {
tenant_id: tenantId,
templates: templates.length,
inputFields: inputFields.length,
outputFields: outputFields.length,
allFields: allFields.length,
relationsCount: Object.keys(relations).length
});
if (Object.keys(relations).length > 0) {
const firstKey = Object.keys(relations)[0];
console.log('relations 对象示例:', {
firstKey: firstKey,
firstKeyType: typeof firstKey,
firstValueCount: relations[firstKey].length,
firstValueSample: relations[firstKey].slice(0, 3)
});
}
console.log('==============================');
// 更新模板选择框
populateTemplateSelect();
refreshFields();
// 显示所有区域
document.getElementById('fieldManagementSection').style.display = 'block';
document.getElementById('templateFieldSection').style.display = 'block';
document.getElementById('backupSection').style.display = 'block';
// 确保tab正确显示然后渲染字段表格
switchTab('input');
document.getElementById('loading').style.display = 'none';
} else {
showMessage('加载数据失败: ' + result.errorMsg, 'error');
const errorMsg = relationsResult.isSuccess ? fieldsResult.errorMsg : relationsResult.errorMsg;
showMessage('加载数据失败: ' + errorMsg, 'error');
console.error('加载数据失败:', {
relationsResult,
fieldsResult
});
document.getElementById('loading').style.display = 'none';
}
} catch (error) {
@ -685,22 +802,87 @@
currentTemplateId = templateId;
const template = templates.find(t => t.id === templateId);
const relatedFieldIds = new Set(relations[templateId] || []);
// 确保 templateId 是数字类型
const templateIdNum = parseInt(templateId);
// 获取该模板关联的所有字段ID
// relations 对象的 key 在 JSON 序列化后是字符串,所以使用字符串 key 查找
const templateIdStr = String(templateIdNum);
let relatedFieldIdsArray = relations[templateIdStr] || relations[templateIdNum] || [];
// 转换为 Set并确保类型一致都转换为数字进行比较
const relatedFieldIds = new Set(relatedFieldIdsArray.map(id => {
const numId = parseInt(id);
return isNaN(numId) ? null : numId;
}).filter(id => id !== null));
console.log('========== 加载模板字段 ==========');
console.log('模板信息:', {
templateId: templateIdNum,
templateIdStr: templateIdStr,
templateName: template ? template.name : '未知',
relationsKeysCount: Object.keys(relations).length,
relationsKeysSample: Object.keys(relations).slice(0, 5)
});
console.log('关联关系查找:', {
'relations[templateIdStr]': relations[templateIdStr],
'relations[templateIdNum]': relations[templateIdNum],
relatedFieldIdsArray: relatedFieldIdsArray,
relatedFieldIds: Array.from(relatedFieldIds),
relatedFieldIdsCount: relatedFieldIds.size
});
console.log('字段列表:', {
inputFieldsCount: inputFields.length,
outputFieldsCount: outputFields.length,
inputFieldIds: inputFields.slice(0, 3).map(f => ({id: f.id, name: f.name})),
outputFieldIds: outputFields.slice(0, 3).map(f => ({id: f.id, name: f.name}))
});
selectedInputFields = new Set();
selectedOutputFields = new Set();
// 检查输入字段关联
let inputMatchCount = 0;
inputFields.forEach(field => {
if (relatedFieldIds.has(field.id)) {
selectedInputFields.add(field.id);
const fieldId = parseInt(field.id);
if (!isNaN(fieldId) && relatedFieldIds.has(fieldId)) {
selectedInputFields.add(fieldId);
inputMatchCount++;
console.log(`[输入字段匹配 ${inputMatchCount}]`, field.name, `(ID: ${fieldId})`);
}
});
// 检查输出字段关联
let outputMatchCount = 0;
outputFields.forEach(field => {
if (relatedFieldIds.has(field.id)) {
selectedOutputFields.add(field.id);
const fieldId = parseInt(field.id);
// 确保 fieldId 是有效数字
if (!isNaN(fieldId)) {
// 检查是否在关联字段ID集合中
if (relatedFieldIds.has(fieldId)) {
selectedOutputFields.add(fieldId);
outputMatchCount++;
console.log(`[输出字段匹配 ${outputMatchCount}]`, field.name, `(ID: ${fieldId})`);
} else {
// 调试:检查为什么没有匹配
if (relatedFieldIds.size > 0 && outputMatchCount === 0) {
console.log(`[调试] 字段 ${field.name} (ID: ${fieldId}) 不在关联集合中`);
console.log(`[调试] 关联集合包含: ${Array.from(relatedFieldIds).slice(0, 5)}`);
}
}
} else {
console.warn(`[警告] 字段 ${field.name} 的ID无效: ${field.id}`);
}
});
console.log('最终选中的字段:', {
inputFields: Array.from(selectedInputFields),
outputFields: Array.from(selectedOutputFields),
inputFieldsSize: selectedInputFields.size,
outputFieldsSize: selectedOutputFields.size
});
console.log('====================================');
renderFields();
updateStats();
document.getElementById('fieldsContainer').style.display = 'grid';
@ -735,15 +917,17 @@
}
container.innerHTML = filteredFields.map(field => {
const isChecked = selectedSet.has(field.id);
// 确保 field.id 和 selectedSet 中的值类型一致(都转换为数字)
const fieldId = parseInt(field.id);
const isChecked = selectedSet.has(fieldId);
return `
<div class="field-item ${isChecked ? 'checked' : ''}" onclick="toggleField(${field.id}, '${type}')">
<div class="field-item ${isChecked ? 'checked' : ''}" onclick="toggleField(${fieldId}, '${type}')">
<div class="field-info">
<div class="field-name">${escapeHtml(field.name)}</div>
<div class="field-code">${escapeHtml(field.filed_code)}</div>
</div>
<input type="checkbox" ${isChecked ? 'checked' : ''}
onclick="event.stopPropagation(); toggleField(${field.id}, '${type}')">
onclick="event.stopPropagation(); toggleField(${fieldId}, '${type}')">
</div>
`;
}).join('');
@ -751,6 +935,9 @@
// 切换字段选择
function toggleField(fieldId, type) {
// 确保 fieldId 是数字类型
fieldId = parseInt(fieldId);
if (type === 'input') {
if (selectedInputFields.has(fieldId)) {
selectedInputFields.delete(fieldId);
@ -808,13 +995,16 @@
btn.textContent = '保存中...';
try {
// 不要转换为数字!直接使用字符串,避免大整数精度丢失
const tenantId = currentTenantId; // 直接使用字符串
const response = await fetch('/api/template-field-relations', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
tenant_id: currentTenantId,
tenant_id: tenantId, // 后端会处理字符串到数字的转换
template_id: currentTemplateId,
input_field_ids: Array.from(selectedInputFields),
output_field_ids: Array.from(selectedOutputFields)
@ -870,13 +1060,15 @@
document.querySelectorAll('.tab-content').forEach(content => content.classList.remove('active'));
if (type === 'input') {
document.querySelectorAll('.tab')[0].classList.add('active');
const tabs = document.querySelectorAll('.tab');
if (tabs[0]) tabs[0].classList.add('active');
document.getElementById('inputFieldsTab').classList.add('active');
} else {
document.querySelectorAll('.tab')[1].classList.add('active');
const tabs = document.querySelectorAll('.tab');
if (tabs[1]) tabs[1].classList.add('active');
document.getElementById('outputFieldsTab').classList.add('active');
}
refreshFields();
renderFieldsTable();
}
async function refreshFields() {
@ -885,16 +1077,41 @@
}
try {
const response = await fetch(`/api/fields?tenant_id=${currentTenantId}`);
const result = await response.json();
// 不要转换为数字!直接使用字符串,避免大整数精度丢失
const tenantId = currentTenantId; // 直接使用字符串
if (result.isSuccess) {
allFields = result.data.fields || [];
inputFields = allFields.filter(f => f.field_type === 1);
outputFields = allFields.filter(f => f.field_type === 2);
// 同时刷新字段管理数据和模板字段关联数据
const [fieldsResponse, relationsResponse] = await Promise.all([
fetch(`/api/field-management/fields?tenant_id=${tenantId}`),
fetch(`/api/template-field-relations?tenant_id=${tenantId}`)
]);
const fieldsResult = await fieldsResponse.json();
const relationsResult = await relationsResponse.json();
if (fieldsResult.isSuccess && relationsResult.isSuccess) {
// 更新字段管理数据
allFields = fieldsResult.data.fields || [];
// 更新模板字段关联数据
templates = relationsResult.data.templates || [];
inputFields = relationsResult.data.input_fields || [];
outputFields = relationsResult.data.output_fields || [];
relations = relationsResult.data.relations || {};
// 更新模板选择框
populateTemplateSelect();
// 更新字段表格
renderFieldsTable();
// 如果当前选择了模板,重新加载模板字段
if (currentTemplateId) {
loadTemplateFields(currentTemplateId);
}
} else {
showMessage('刷新字段列表失败: ' + result.errorMsg, 'error');
const errorMsg = fieldsResult.isSuccess ? relationsResult.errorMsg : fieldsResult.errorMsg;
showMessage('刷新字段列表失败: ' + errorMsg, 'error');
}
} catch (error) {
showMessage('刷新字段列表失败: ' + error.message, 'error');
@ -903,22 +1120,52 @@
function renderFieldsTable() {
const activeTab = document.querySelector('.tab-content.active');
const isInput = activeTab.id === 'inputFieldsTab';
const fields = isInput ? inputFields : outputFields;
if (!activeTab) {
// 如果没有活动的tab默认显示输入字段tab
const inputTab = document.getElementById('inputFieldsTab');
if (inputTab) {
inputTab.classList.add('active');
const tabs = document.querySelectorAll('.tab');
if (tabs[0]) tabs[0].classList.add('active');
} else {
return;
}
}
const isInput = activeTab ? activeTab.id === 'inputFieldsTab' : true;
// 使用 allFields 而不是 inputFields/outputFields因为 allFields 包含所有字段(包括未启用的)
const fields = allFields.filter(f => f.field_type === (isInput ? 1 : 2));
const searchTerm = (isInput ?
document.getElementById('inputFieldSearch').value.toLowerCase() :
document.getElementById('outputFieldSearch').value.toLowerCase());
(document.getElementById('inputFieldSearch')?.value || '').toLowerCase() :
(document.getElementById('outputFieldSearch')?.value || '').toLowerCase());
const filteredFields = fields.filter(field => {
return field.name.toLowerCase().includes(searchTerm) ||
field.filed_code.toLowerCase().includes(searchTerm);
const name = (field.name || '').toLowerCase();
const code = (field.filed_code || '').toLowerCase();
return name.includes(searchTerm) || code.includes(searchTerm);
});
const tableId = isInput ? 'inputFieldsTable' : 'outputFieldsTable';
const container = document.getElementById(tableId);
if (!container) {
console.error('找不到容器:', tableId);
return;
}
console.log('渲染字段表格:', {
isInput,
allFieldsCount: allFields.length,
fieldsCount: fields.length,
filteredCount: filteredFields.length
});
if (filteredFields.length === 0) {
if (allFields.length === 0) {
container.innerHTML = '<div class="empty-state">该租户下暂无字段数据</div>';
} else {
container.innerHTML = '<div class="empty-state">没有找到匹配的字段</div>';
}
return;
}
@ -937,7 +1184,7 @@
<tr>
<td>${escapeHtml(field.name)}</td>
<td><code>${escapeHtml(field.filed_code)}</code></td>
<td>${field.state === 1 ? '<span style="color: green;">启用</span>' : '<span style="color: red;">未启用</span>'}</td>
<td>${(field.state === 1 || field.state === '1') ? '<span style="color: green;">启用</span>' : '<span style="color: red;">未启用</span>'}</td>
<td>
<div class="field-actions">
<button class="btn btn-secondary" onclick="editField(${field.id})">编辑</button>
@ -979,7 +1226,8 @@
document.getElementById('fieldName').value = field.name;
document.getElementById('fieldCode').value = field.filed_code;
document.getElementById('fieldType').value = field.field_type;
document.getElementById('fieldState').value = field.state;
// 确保 state 值正确设置(可能是数字或字符串)
document.getElementById('fieldState').value = String(field.state || 1);
document.getElementById('fieldModal').style.display = 'block';
}
@ -996,11 +1244,14 @@
const fieldId = document.getElementById('fieldId').value;
const isEdit = !!fieldId;
const url = isEdit ? `/api/fields/${fieldId}` : '/api/fields';
const url = isEdit ? `/api/field-management/fields/${fieldId}` : '/api/field-management/fields';
const method = isEdit ? 'PUT' : 'POST';
// 不要转换为数字!直接使用字符串,避免大整数精度丢失
const tenantId = currentTenantId; // 直接使用字符串
const data = {
tenant_id: currentTenantId,
tenant_id: tenantId, // 后端会处理字符串到数字的转换
name: document.getElementById('fieldName').value,
filed_code: document.getElementById('fieldCode').value,
field_type: parseInt(document.getElementById('fieldType').value),
@ -1008,7 +1259,7 @@
};
if (isEdit) {
data.tenant_id = currentTenantId;
data.tenant_id = tenantId;
}
try {
@ -1046,7 +1297,10 @@
}
try {
const response = await fetch(`/api/fields/${fieldId}?tenant_id=${currentTenantId}`, {
// 不要转换为数字!直接使用字符串,避免大整数精度丢失
const tenantId = currentTenantId; // 直接使用字符串
const response = await fetch(`/api/field-management/fields/${fieldId}?tenant_id=${tenantId}`, {
method: 'DELETE'
});
@ -1072,13 +1326,16 @@
}
try {
// 不要转换为数字!直接使用字符串,避免大整数精度丢失
const tenantId = currentTenantId; // 直接使用字符串
const response = await fetch('/api/database/backup', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
tenant_id: currentTenantId
tenant_id: tenantId // 后端会处理字符串到数字的转换
})
});
@ -1134,11 +1391,14 @@
}
try {
// 不要转换为数字!直接使用字符串,避免大整数精度丢失
const tenantId = currentTenantId; // 直接使用字符串
const formData = new FormData();
formData.append('file', file);
formData.append('tenant_id', currentTenantId);
formData.append('tenant_id', tenantId);
const response = await fetch(`/api/database/restore?tenant_id=${currentTenantId}`, {
const response = await fetch(`/api/database/restore?tenant_id=${tenantId}`, {
method: 'POST',
body: formData
});

82
test_api_direct.py Normal file
View File

@ -0,0 +1,82 @@
"""
直接测试 API查看返回的关联关系数据
"""
import requests
import json
TENANT_ID = 615873064429507639
TEMPLATE_ID = 1765432134276990 # 1.请示报告卡(初核谈话)
API_BASE_URL = 'http://localhost:7500'
def test_api():
    """Fetch relation data from the running API and print a diagnostic summary.

    Queries ``/api/template-field-relations`` for the hard-coded TENANT_ID,
    then inspects which input/output fields are related to TEMPLATE_ID.
    Pure console diagnostics — no assertions, no return value.
    NOTE(review): requires the Flask app to be listening on API_BASE_URL.
    """
    print(f"测试租户ID: {TENANT_ID}")
    print(f"测试模板ID: {TEMPLATE_ID}")
    print("=" * 80)
    try:
        response = requests.get(
            f'{API_BASE_URL}/api/template-field-relations',
            params={'tenant_id': TENANT_ID},
            timeout=10
        )
        if response.status_code == 200:
            result = response.json()
            if result.get('isSuccess'):
                data = result.get('data', {})
                relations = data.get('relations', {})
                # JSON object keys are always strings, so look up by str(id).
                template_id_str = str(TEMPLATE_ID)
                related_fields = relations.get(template_id_str, [])
                print(f"\n模板ID: {TEMPLATE_ID}")
                print(f"关联字段数: {len(related_fields)}")
                print(f"关联字段ID: {related_fields}")
                # Split the related ids by field kind (input vs output).
                input_fields = data.get('input_fields', [])
                output_fields = data.get('output_fields', [])
                input_field_ids = {f['id'] for f in input_fields}
                output_field_ids = {f['id'] for f in output_fields}
                related_input = [fid for fid in related_fields if fid in input_field_ids]
                related_output = [fid for fid in related_fields if fid in output_field_ids]
                print(f"\n关联的输入字段ID: {related_input}")
                print(f"关联的输出字段ID: {related_output}")
                if related_input:
                    print(f"\n关联的输入字段详情:")
                    for fid in related_input:
                        field = next((f for f in input_fields if f['id'] == fid), None)
                        if field:
                            print(f" - {field['name']} (ID: {fid})")
                if related_output:
                    print(f"\n关联的输出字段详情:")
                    for fid in related_output[:10]:
                        field = next((f for f in output_fields if f['id'] == fid), None)
                        if field:
                            print(f" - {field['name']} (ID: {fid})")
                else:
                    print(f"\n[警告] 没有找到关联的输出字段")
                    # When nothing matched, sample other templates to see
                    # whether any of them has output-field relations at all.
                    print(f"\n检查其他模板的输出字段关联:")
                    for key, fields in list(relations.items())[:5]:
                        template_id = int(key)
                        output_count = len([fid for fid in fields if fid in output_field_ids])
                        if output_count > 0:
                            print(f" 模板ID {template_id}: {output_count} 个输出字段")
            else:
                print(f"API 返回错误: {result.get('errorMsg')}")
        else:
            print(f"API 请求失败: {response.status_code}")
    except Exception as e:
        print(f"API 请求异常: {e}")
if __name__ == '__main__':
test_api()

93
test_api_response.py Normal file
View File

@ -0,0 +1,93 @@
"""
测试 API 响应检查是否有 bytes 序列化问题
"""
import pymysql
import os
import json
from dotenv import load_dotenv
load_dotenv()
# 数据库连接配置
DB_CONFIG = {
'host': os.getenv('DB_HOST', '152.136.177.240'),
'port': int(os.getenv('DB_PORT', 5012)),
'user': os.getenv('DB_USER', 'finyx'),
'password': os.getenv('DB_PASSWORD', '6QsGK6MpePZDE57Z'),
'database': os.getenv('DB_NAME', 'finyx'),
'charset': 'utf8mb4'
}
def clean_query_result(data):
    """Recursively convert DB query results into JSON-serializable values.

    - ``bytes`` (e.g. BLOB/BIT columns) are decoded as UTF-8; undecodable
      byte sequences are dropped instead of raising.
    - ``dict`` and ``list`` containers are cleaned recursively.
    - JSON-native scalars (int, float, str, bool, None) pass through.
    - Anything else (``Decimal``, ``datetime``, ...) falls back to ``str``.

    Args:
        data: A value (or nested container) from a DB cursor fetch.

    Returns:
        A structure containing only JSON-serializable values.
    """
    if isinstance(data, bytes):
        try:
            return data.decode('utf-8')
        except UnicodeDecodeError:
            # Tolerate malformed byte sequences rather than failing the caller.
            return data.decode('utf-8', errors='ignore')
    if isinstance(data, dict):
        return {key: clean_query_result(value) for key, value in data.items()}
    if isinstance(data, list):
        return [clean_query_result(item) for item in data]
    if isinstance(data, (int, float, str, bool, type(None))):
        return data
    # Fallback for Decimal, datetime, date, time, etc.
    try:
        return str(data)
    except Exception:
        # Narrowed from a bare ``except``: never swallow KeyboardInterrupt/SystemExit.
        return data
def test_field_query():
    """Fetch a few field rows, clean them, and verify JSON serialization.

    Connects to the configured MySQL database, reads up to five rows from
    ``f_polic_field`` for a hard-coded tenant, prints the raw driver types
    next to the cleaned types, then checks the cleaned rows serialize to
    JSON. Console diagnostics only — no assertions.
    """
    conn = pymysql.connect(**DB_CONFIG)
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    try:
        tenant_id = 615873064429507639
        print("测试查询字段...")
        cursor.execute("""
            SELECT id, name, filed_code, field_type, state
            FROM f_polic_field
            WHERE tenant_id = %s
            ORDER BY field_type, name
            LIMIT 5
        """, (tenant_id,))
        fields = cursor.fetchall()
        print(f"查询到 {len(fields)} 条记录")
        # Show the raw driver types first (BIT/BLOB columns may be bytes).
        if fields:
            print("\n第一条记录的原始数据:")
            first_field = fields[0]
            for key, value in first_field.items():
                print(f" {key}: {type(value).__name__} = {repr(value)}")
            # Normalize bytes/Decimal/etc. into JSON-friendly values.
            cleaned_fields = [clean_query_result(field) for field in fields]
            print("\n清理后的第一条记录:")
            first_cleaned = cleaned_fields[0]
            for key, value in first_cleaned.items():
                print(f" {key}: {type(value).__name__} = {repr(value)}")
            # Prove the cleaned payload is serializable (default=str as a net).
            print("\n测试 JSON 序列化...")
            try:
                json_str = json.dumps(cleaned_fields, ensure_ascii=False, indent=2, default=str)
                print("✓ JSON 序列化成功")
                print(f"JSON 长度: {len(json_str)} 字符")
            except Exception as e:
                print(f"✗ JSON 序列化失败: {e}")
                print(f"错误类型: {type(e).__name__}")
    finally:
        cursor.close()
        conn.close()
if __name__ == '__main__':
test_field_query()

306
test_field_relations.py Normal file
View File

@ -0,0 +1,306 @@
"""
测试字段关联关系 API验证返回结果是否与数据库一致
"""
import pymysql
import os
import json
import requests
import sys
from dotenv import load_dotenv
# 设置输出编码为UTF-8
if sys.platform == 'win32':
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
load_dotenv()
# 数据库连接配置
DB_CONFIG = {
'host': os.getenv('DB_HOST', '152.136.177.240'),
'port': int(os.getenv('DB_PORT', 5012)),
'user': os.getenv('DB_USER', 'finyx'),
'password': os.getenv('DB_PASSWORD', '6QsGK6MpePZDE57Z'),
'database': os.getenv('DB_NAME', 'finyx'),
'charset': 'utf8mb4'
}
TENANT_ID = 615873064429507639
API_BASE_URL = 'http://localhost:7500'
def clean_query_result(data):
    """Recursively turn bytes values inside a query result into strings."""
    if isinstance(data, bytes):
        # Decoding with errors='ignore' yields the same result as a strict
        # decode for valid UTF-8, and silently drops malformed sequences.
        return data.decode('utf-8', errors='ignore')
    if isinstance(data, dict):
        return {k: clean_query_result(v) for k, v in data.items()}
    if isinstance(data, list):
        return [clean_query_result(v) for v in data]
    if isinstance(data, (int, float, str, bool, type(None))):
        return data
    # Anything else (Decimal, datetime, ...) is stringified best-effort.
    try:
        return str(data)
    except:
        return data
def _fetch_fields(cursor, tenant_id, field_type):
    """Fetch all fields of one type (1=input, 2=output) for a tenant,
    cleaned and with ``state`` coerced to int (defaulting to 1)."""
    cursor.execute("""
        SELECT id, name, filed_code, field_type, state
        FROM f_polic_field
        WHERE tenant_id = %s AND field_type = %s
        ORDER BY name
    """, (tenant_id, field_type))
    fields = [clean_query_result(f) for f in cursor.fetchall()]
    for field in fields:
        if 'state' in field:
            try:
                field['state'] = int(field['state'])
            except (TypeError, ValueError):
                # Unparseable state: assume "enabled", matching prior behavior.
                field['state'] = 1
    return fields


def get_database_relations(conn, tenant_id):
    """Load templates, fields and template->field relations from the DB.

    Args:
        conn: An open pymysql connection.
        tenant_id: Tenant whose configuration should be loaded.

    Returns:
        dict with keys ``templates``, ``input_fields``, ``output_fields``
        and ``relations`` (mapping int ``file_id`` -> list of int ``filed_id``).
    """
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    # Active templates only (state = 1).
    cursor.execute("""
        SELECT id, name, template_code
        FROM f_polic_file_config
        WHERE tenant_id = %s AND state = 1
        ORDER BY name
    """, (tenant_id,))
    templates = [clean_query_result(t) for t in cursor.fetchall()]
    # field_type 1 = input, 2 = output; same query, deduplicated via helper.
    input_fields = _fetch_fields(cursor, tenant_id, 1)
    output_fields = _fetch_fields(cursor, tenant_id, 2)
    # Active relations, joined against the template table to drop orphans.
    cursor.execute("""
        SELECT fff.file_id, fff.filed_id
        FROM f_polic_file_field fff
        INNER JOIN f_polic_file_config fc ON fff.file_id = fc.id AND fff.tenant_id = fc.tenant_id
        WHERE fff.tenant_id = %s AND fff.state = 1
    """, (tenant_id,))
    relations = [clean_query_result(r) for r in cursor.fetchall()]
    # Group filed_ids by file_id; rows with non-numeric ids are skipped.
    relation_map = {}
    for rel in relations:
        try:
            file_id = int(rel['file_id'])
            filed_id = int(rel['filed_id'])
        except (TypeError, ValueError):
            continue
        relation_map.setdefault(file_id, []).append(filed_id)
    cursor.close()
    return {
        'templates': templates,
        'input_fields': input_fields,
        'output_fields': output_fields,
        'relations': relation_map
    }
def get_api_relations(tenant_id):
    """Fetch template/field relation data for a tenant over HTTP.

    Returns the ``data`` payload on success, or None on any failure
    (non-200 status, API-level error, or request exception).
    """
    endpoint = f'{API_BASE_URL}/api/template-field-relations'
    try:
        response = requests.get(
            endpoint,
            params={'tenant_id': tenant_id},
            timeout=10
        )
        # Guard-clause style: bail out early on each failure mode.
        if response.status_code != 200:
            print(f"API 请求失败: {response.status_code}")
            return None
        result = response.json()
        if not result.get('isSuccess'):
            print(f"API 返回错误: {result.get('errorMsg')}")
            return None
        return result.get('data', {})
    except Exception as e:
        print(f"API 请求异常: {e}")
        return None
def _normalize_relation_keys(rel_map):
    """Coerce relation-map keys to int where possible.

    The DB-side map uses int template ids while the API map arrives from
    JSON with string keys; without normalization every template appears
    twice in the key union and all comparisons spuriously mismatch (and
    ``sorted`` on mixed int/str keys raises TypeError on Python 3)."""
    normalized = {}
    for key, value in rel_map.items():
        try:
            normalized[int(key)] = value
        except (TypeError, ValueError):
            normalized[key] = value
    return normalized


def compare_results(db_data, api_data):
    """Compare DB-sourced and API-sourced relation data and print a report.

    Args:
        db_data: dict with ``templates``/``input_fields``/``output_fields``/
            ``relations`` as returned by ``get_database_relations``.
        api_data: the ``data`` payload returned by the API.

    Side effects: prints a section-by-section comparison; returns None.
    """
    print("=" * 80)
    print("对比数据库和 API 返回的数据")
    print("=" * 80)
    # 1. Templates: compare the id sets.
    print("\n1. 对比模板:")
    db_templates = {t['id']: t for t in db_data['templates']}
    api_templates = {t['id']: t for t in api_data.get('templates', [])}
    print(f" 数据库模板数: {len(db_templates)}")
    print(f" API 模板数: {len(api_templates)}")
    if set(db_templates.keys()) != set(api_templates.keys()):
        print(" [ERROR] 模板ID不一致")
        print(f" 数据库有但API没有: {set(db_templates.keys()) - set(api_templates.keys())}")
        print(f" API有但数据库没有: {set(api_templates.keys()) - set(db_templates.keys())}")
    else:
        print(" [OK] 模板ID一致")
    # 2. Input fields: compare the id sets.
    print("\n2. 对比输入字段:")
    db_input_fields = {f['id']: f for f in db_data['input_fields']}
    api_input_fields = {f['id']: f for f in api_data.get('input_fields', [])}
    print(f" 数据库输入字段数: {len(db_input_fields)}")
    print(f" API 输入字段数: {len(api_input_fields)}")
    if set(db_input_fields.keys()) != set(api_input_fields.keys()):
        print(" [ERROR] 输入字段ID不一致")
        print(f" 数据库有但API没有: {set(db_input_fields.keys()) - set(api_input_fields.keys())}")
        print(f" API有但数据库没有: {set(api_input_fields.keys()) - set(db_input_fields.keys())}")
    else:
        print(" [OK] 输入字段ID一致")
    # 3. Output fields: compare the id sets.
    print("\n3. 对比输出字段:")
    db_output_fields = {f['id']: f for f in db_data['output_fields']}
    api_output_fields = {f['id']: f for f in api_data.get('output_fields', [])}
    print(f" 数据库输出字段数: {len(db_output_fields)}")
    print(f" API 输出字段数: {len(api_output_fields)}")
    if set(db_output_fields.keys()) != set(api_output_fields.keys()):
        print(" [ERROR] 输出字段ID不一致")
        print(f" 数据库有但API没有: {set(db_output_fields.keys()) - set(api_output_fields.keys())}")
        print(f" API有但数据库没有: {set(api_output_fields.keys()) - set(db_output_fields.keys())}")
    else:
        print(" [OK] 输出字段ID一致")
    # 4. Relations: normalize key types first (the fix), then compare per template.
    print("\n4. 对比关联关系:")
    db_relations = _normalize_relation_keys(db_data['relations'])
    api_relations = _normalize_relation_keys(api_data.get('relations', {}))
    print(f" 数据库关联模板数: {len(db_relations)}")
    print(f" API 关联模板数: {len(api_relations)}")
    all_template_ids = set(db_relations.keys()) | set(api_relations.keys())
    mismatch_count = 0
    for template_id in all_template_ids:
        db_field_ids = set(db_relations.get(template_id, []))
        api_field_ids = set(api_relations.get(template_id, []))
        if db_field_ids != api_field_ids:
            mismatch_count += 1
            template_name = db_templates.get(template_id, {}).get('name', f'ID:{template_id}')
            print(f"\n [ERROR] 模板 '{template_name}' (ID: {template_id}) 关联关系不一致:")
            print(f" 数据库关联字段: {sorted(db_field_ids)}")
            print(f" API 关联字段: {sorted(api_field_ids)}")
            print(f" 数据库有但API没有: {sorted(db_field_ids - api_field_ids)}")
            print(f" API有但数据库没有: {sorted(api_field_ids - db_field_ids)}")
            # Break the mismatch down by field kind for easier debugging.
            db_input = db_field_ids & set(db_input_fields.keys())
            db_output = db_field_ids & set(db_output_fields.keys())
            api_input = api_field_ids & set(api_input_fields.keys())
            api_output = api_field_ids & set(api_output_fields.keys())
            print(f" 数据库 - 输入字段: {sorted(db_input)}, 输出字段: {sorted(db_output)}")
            print(f" API - 输入字段: {sorted(api_input)}, 输出字段: {sorted(api_output)}")
    if mismatch_count == 0:
        print(" [OK] 所有模板的关联关系都一致")
    else:
        print(f"\n [ERROR] 共 {mismatch_count} 个模板的关联关系不一致")
    # 5. Detailed dump of the first three templates (sorted int keys are now safe).
    print("\n5. 详细检查前3个模板的关联关系:")
    template_ids = sorted(all_template_ids)[:3]
    for template_id in template_ids:
        template_name = db_templates.get(template_id, {}).get('name', f'ID:{template_id}')
        db_field_ids = set(db_relations.get(template_id, []))
        api_field_ids = set(api_relations.get(template_id, []))
        print(f"\n 模板: {template_name} (ID: {template_id})")
        print(f" 数据库关联字段数: {len(db_field_ids)}")
        print(f" API 关联字段数: {len(api_field_ids)}")
        if db_field_ids:
            db_input = sorted(db_field_ids & set(db_input_fields.keys()))
            db_output = sorted(db_field_ids & set(db_output_fields.keys()))
            print(f" 数据库关联 - 输入字段({len(db_input)}): {db_input[:5]}{'...' if len(db_input) > 5 else ''}")
            print(f" 数据库关联 - 输出字段({len(db_output)}): {db_output[:5]}{'...' if len(db_output) > 5 else ''}")
        if api_field_ids:
            api_input = sorted(api_field_ids & set(api_input_fields.keys()))
            api_output = sorted(api_field_ids & set(api_output_fields.keys()))
            print(f" API 关联 - 输入字段({len(api_input)}): {api_input[:5]}{'...' if len(api_input) > 5 else ''}")
            print(f" API 关联 - 输出字段({len(api_output)}): {api_output[:5]}{'...' if len(api_output) > 5 else ''}")
def main():
    """Entry point: load relation data from DB and API, then compare them.

    Prints summary counts for each source; aborts (with a message) if the
    API request fails, so the comparison only runs on complete data.
    """
    print("开始测试字段关联关系 API...")
    print(f"租户ID: {TENANT_ID}")
    print(f"API地址: {API_BASE_URL}")
    # Step 1: ground truth straight from the database.
    print("\n从数据库获取数据...")
    conn = pymysql.connect(**DB_CONFIG)
    try:
        db_data = get_database_relations(conn, TENANT_ID)
        print(f"[OK] 数据库查询完成")
        print(f" 模板数: {len(db_data['templates'])}")
        print(f" 输入字段数: {len(db_data['input_fields'])}")
        print(f" 输出字段数: {len(db_data['output_fields'])}")
        print(f" 关联关系数: {sum(len(v) for v in db_data['relations'].values())}")
    finally:
        conn.close()
    # Step 2: the same data as served by the HTTP API.
    print("\n从 API 获取数据...")
    api_data = get_api_relations(TENANT_ID)
    if api_data:
        print(f"[OK] API 查询完成")
        print(f" 模板数: {len(api_data.get('templates', []))}")
        print(f" 输入字段数: {len(api_data.get('input_fields', []))}")
        print(f" 输出字段数: {len(api_data.get('output_fields', []))}")
        print(f" 关联关系数: {sum(len(v) for v in api_data.get('relations', {}).values())}")
    else:
        print("[ERROR] API 查询失败")
        return
    # Step 3: diff the two views.
    compare_results(db_data, api_data)
    print("\n" + "=" * 80)
    print("测试完成")
    print("=" * 80)
if __name__ == '__main__':
main()

167
test_relations_detailed.py Normal file
View File

@ -0,0 +1,167 @@
"""
详细测试关联关系数据
"""
import pymysql
import os
import json
import requests
from dotenv import load_dotenv
load_dotenv()
DB_CONFIG = {
'host': os.getenv('DB_HOST', '152.136.177.240'),
'port': int(os.getenv('DB_PORT', 5012)),
'user': os.getenv('DB_USER', 'finyx'),
'password': os.getenv('DB_PASSWORD', '6QsGK6MpePZDE57Z'),
'database': os.getenv('DB_NAME', 'finyx'),
'charset': 'utf8mb4'
}
TENANT_ID = 615873064429507639
API_BASE_URL = 'http://localhost:7500'
def test_specific_template():
    """Cross-check one hard-coded template's relations between DB and API.

    Looks up the template by name, lists its DB-side field relations, then
    fetches the API payload and reports which key type (str vs int) locates
    the template in ``relations`` and whether the two field-id sets match.
    Console diagnostics only.
    """
    conn = pymysql.connect(**DB_CONFIG)
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    try:
        # Template under test: 1.请示报告卡(初核谈话)
        template_name = "1.请示报告卡(初核谈话)"
        print(f"测试模板: {template_name}")
        print("=" * 80)
        # 1. Resolve the template id from the DB by name.
        cursor.execute("""
            SELECT id, name
            FROM f_polic_file_config
            WHERE tenant_id = %s AND name = %s
        """, (TENANT_ID, template_name))
        template = cursor.fetchone()
        if not template:
            print(f"模板 '{template_name}' 不存在")
            return
        template_id = template['id']
        print(f"模板ID: {template_id} (类型: {type(template_id).__name__})")
        # 2. All DB relations for this template, joined with field metadata.
        cursor.execute("""
            SELECT fff.file_id, fff.filed_id, fff.state, f.name as field_name, f.field_type
            FROM f_polic_file_field fff
            INNER JOIN f_polic_field f ON fff.filed_id = f.id AND fff.tenant_id = f.tenant_id
            WHERE fff.tenant_id = %s AND fff.file_id = %s
            ORDER BY f.field_type, f.name
        """, (TENANT_ID, template_id))
        relations = cursor.fetchall()
        print(f"\n数据库中的关联关系数: {len(relations)}")
        print(f"启用的关联关系数: {len([r for r in relations if r['state'] == 1])}")
        input_fields = [r for r in relations if r['field_type'] == 1]
        output_fields = [r for r in relations if r['field_type'] == 2]
        print(f"\n输入字段关联: {len(input_fields)}")
        for r in input_fields:
            print(f" - {r['field_name']} (ID: {r['filed_id']}, state: {r['state']})")
        print(f"\n输出字段关联: {len(output_fields)}")
        for r in output_fields[:10]:  # show only the first 10
            print(f" - {r['field_name']} (ID: {r['filed_id']}, state: {r['state']})")
        if len(output_fields) > 10:
            print(f" ... 还有 {len(output_fields) - 10} 个输出字段")
        # 3. Fetch the same data through the API and compare.
        print("\n" + "=" * 80)
        print("调用 API 获取数据...")
        try:
            response = requests.get(
                f'{API_BASE_URL}/api/template-field-relations',
                params={'tenant_id': TENANT_ID},
                timeout=10
            )
            if response.status_code == 200:
                result = response.json()
                if result.get('isSuccess'):
                    api_data = result.get('data', {})
                    api_relations = api_data.get('relations', {})
                    # Inspect the key types of the relations object (JSON
                    # serialization turns int keys into strings).
                    print(f"\nAPI 返回的 relations 对象:")
                    print(f" relations 类型: {type(api_relations).__name__}")
                    print(f" relations key 数量: {len(api_relations)}")
                    # Probe both key types to see which one matches.
                    template_id_str = str(template_id)
                    template_id_int = int(template_id)
                    print(f"\n尝试查找模板关联关系:")
                    print(f" 使用字符串 key '{template_id_str}': {template_id_str in api_relations}")
                    print(f" 使用数字 key {template_id_int}: {template_id_int in api_relations}")
                    sample_keys = list(api_relations.keys())[:5]
                    print(f"\n relations 的 key 示例 (前5个):")
                    for key in sample_keys:
                        print(f" key: {repr(key)}, 类型: {type(key).__name__}, 值数量: {len(api_relations[key])}")
                    # Retrieve the relation list via whichever key type works.
                    related_field_ids = None
                    if template_id_str in api_relations:
                        related_field_ids = api_relations[template_id_str]
                        print(f"\n 找到关联关系 (使用字符串key): {len(related_field_ids)} 个字段")
                    elif template_id_int in api_relations:
                        related_field_ids = api_relations[template_id_int]
                        print(f"\n 找到关联关系 (使用数字key): {len(related_field_ids)} 个字段")
                    else:
                        print(f"\n 未找到该模板的关联关系")
                        print(f" 可用的模板ID: {list(api_relations.keys())[:10]}")
                    if related_field_ids:
                        print(f"\n API 返回的关联字段ID: {related_field_ids[:10]}{'...' if len(related_field_ids) > 10 else ''}")
                        # Split the API-side ids by field kind.
                        api_input_fields = api_data.get('input_fields', [])
                        api_output_fields = api_data.get('output_fields', [])
                        input_field_ids = {f['id'] for f in api_input_fields}
                        output_field_ids = {f['id'] for f in api_output_fields}
                        related_input = [fid for fid in related_field_ids if fid in input_field_ids]
                        related_output = [fid for fid in related_field_ids if fid in output_field_ids]
                        print(f"\n 关联的输入字段ID: {related_input}")
                        print(f" 关联的输出字段ID: {related_output[:10]}{'...' if len(related_output) > 10 else ''}")
                        # Compare active DB relations against the API set.
                        db_field_ids = {r['filed_id'] for r in relations if r['state'] == 1}
                        api_field_ids = set(related_field_ids)
                        print(f"\n 对比结果:")
                        print(f" 数据库关联字段数: {len(db_field_ids)}")
                        print(f" API 关联字段数: {len(api_field_ids)}")
                        print(f" 一致: {db_field_ids == api_field_ids}")
                        if db_field_ids != api_field_ids:
                            missing_in_api = db_field_ids - api_field_ids
                            extra_in_api = api_field_ids - db_field_ids
                            if missing_in_api:
                                print(f" 数据库有但API没有: {sorted(list(missing_in_api))[:10]}")
                            if extra_in_api:
                                print(f" API有但数据库没有: {sorted(list(extra_in_api))[:10]}")
                else:
                    print(f"API 返回错误: {result.get('errorMsg')}")
            else:
                print(f"API 请求失败: {response.status_code}")
        except Exception as e:
            print(f"API 请求异常: {e}")
    finally:
        cursor.close()
        conn.close()
if __name__ == '__main__':
test_specific_template()

60
test_tenant_ids_api.py Normal file
View File

@ -0,0 +1,60 @@
"""
测试 tenant-ids API
"""
import pymysql
import os
from dotenv import load_dotenv
load_dotenv()
# 数据库连接配置
DB_CONFIG = {
'host': os.getenv('DB_HOST', '152.136.177.240'),
'port': int(os.getenv('DB_PORT', 5012)),
'user': os.getenv('DB_USER', 'finyx'),
'password': os.getenv('DB_PASSWORD', '6QsGK6MpePZDE57Z'),
'database': os.getenv('DB_NAME', 'finyx'),
'charset': 'utf8mb4'
}
def test_query():
    """Run the tenant-id union query and verify it JSON-serializes.

    Collects distinct ``tenant_id`` values across the field, template and
    relation tables (matching the /api/tenant-ids endpoint's query), prints
    the values and their Python types, then dumps the API-shaped payload as
    JSON. Console diagnostics only.
    """
    conn = pymysql.connect(**DB_CONFIG)
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    try:
        print("测试查询所有 tenant_id...")
        # UNION already de-duplicates; DISTINCT in the outer query is belt-and-braces.
        cursor.execute("""
            SELECT DISTINCT tenant_id
            FROM (
                SELECT tenant_id FROM f_polic_field WHERE tenant_id IS NOT NULL
                UNION
                SELECT tenant_id FROM f_polic_file_config WHERE tenant_id IS NOT NULL
                UNION
                SELECT tenant_id FROM f_polic_file_field WHERE tenant_id IS NOT NULL
            ) AS all_tenants
            ORDER BY tenant_id
        """)
        tenant_ids = [row['tenant_id'] for row in cursor.fetchall()]
        print(f"查询结果: {tenant_ids}")
        print(f"数量: {len(tenant_ids)}")
        print(f"类型: {[type(tid).__name__ for tid in tenant_ids]}")
        # Mirror the API response shape and check it serializes cleanly.
        import json
        result = {
            'isSuccess': True,
            'data': {
                'tenant_ids': tenant_ids
            }
        }
        json_str = json.dumps(result, ensure_ascii=False, indent=2)
        print(f"\nJSON 序列化结果:\n{json_str}")
    finally:
        cursor.close()
        conn.close()
if __name__ == '__main__':
test_query()

View File

@ -0,0 +1,456 @@
"""
扫描 template_finish/ 目录下的模板文档分析占位符更新关联关系
"""
import os
import re
import sys
import pymysql
from pathlib import Path
from docx import Document
from datetime import datetime
from typing import Dict, List, Set, Optional
from dotenv import load_dotenv
# 设置输出编码为UTF-8
if sys.platform == 'win32':
import io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
load_dotenv()
# 数据库连接配置
DB_CONFIG = {
'host': os.getenv('DB_HOST', '152.136.177.240'),
'port': int(os.getenv('DB_PORT', 5012)),
'user': os.getenv('DB_USER', 'finyx'),
'password': os.getenv('DB_PASSWORD', '6QsGK6MpePZDE57Z'),
'database': os.getenv('DB_NAME', 'finyx'),
'charset': 'utf8mb4'
}
TENANT_ID = 615873064429507639
CREATED_BY = 655162080928945152
UPDATED_BY = 655162080928945152
CURRENT_TIME = datetime.now()
# 模板目录
TEMPLATE_DIR = 'template_finish'
def extract_placeholders_from_docx(file_path: str) -> List[str]:
    """Extract all ``{{field_code}}`` placeholders from a .docx file.

    Scans both body paragraphs and table cells. Text is reassembled from
    runs first because Word may split a placeholder across several runs.

    Args:
        file_path: Path to the .docx file.

    Returns:
        Sorted list of unique placeholder codes, e.g.
        ``['field_code1', 'field_code2', ...]``; empty list if the
        document cannot be read.
    """
    placeholders = set()
    pattern = r'\{\{([^}]+)\}\}'  # matches the {{field_code}} form
    try:
        doc = Document(file_path)
        # Body paragraphs: prefer concatenated run text; fall back to
        # paragraph.text when the runs are empty.
        for paragraph in doc.paragraphs:
            text = ''.join([run.text for run in paragraph.runs])
            if not text:
                text = paragraph.text
            matches = re.findall(pattern, text)
            for match in matches:
                cleaned = match.strip()
                # Drop malformed placeholders that still contain braces.
                if cleaned and '{' not in cleaned and '}' not in cleaned:
                    placeholders.add(cleaned)
        # Table cells: same extraction, iterating every cell's runs.
        for table in doc.tables:
            for row in table.rows:
                for cell in row.cells:
                    cell_text = ''.join([run.text for para in cell.paragraphs for run in para.runs])
                    if not cell_text:
                        cell_text = cell.text
                    matches = re.findall(pattern, cell_text)
                    for match in matches:
                        cleaned = match.strip()
                        # Drop malformed placeholders that still contain braces.
                        if cleaned and '{' not in cleaned and '}' not in cleaned:
                            placeholders.add(cleaned)
    except Exception as e:
        print(f" 错误: 读取文件失败 - {str(e)}")
        import traceback
        traceback.print_exc()
        return []
    return sorted(list(placeholders))
def get_field_mapping(conn) -> Dict[str, Dict]:
    """Build a lookup of filed_code -> field info for the current tenant.

    Returns:
        {filed_code: {'id', 'name', 'field_type', 'state'}} where ``state``
        is normalized from a single-byte bytes value when the driver
        returns one.
    """
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    cursor.execute("""
        SELECT id, name, filed_code, field_type, state
        FROM f_polic_field
        WHERE tenant_id = %s
    """, (TENANT_ID,))
    rows = cursor.fetchall()
    cursor.close()
    field_map = {}
    for row in rows:
        raw_state = row['state']
        # BIT/TINYBLOB columns may surface as bytes: decode a single byte,
        # otherwise fall back to "enabled" (1).
        if isinstance(raw_state, bytes):
            raw_state = int.from_bytes(raw_state, byteorder='big') if len(raw_state) == 1 else 1
        field_map[row['filed_code']] = {
            'id': row['id'],
            'name': row['name'],
            'field_type': row['field_type'],
            'state': raw_state
        }
    return field_map
def get_template_mapping(conn) -> Dict[str, int]:
    """Map template names to their ids for the current tenant.

    Returns:
        {template_name: template_id}; names without a ``.docx`` suffix are
        also registered under ``name + '.docx'`` so file names resolve too.
    """
    cursor = conn.cursor(pymysql.cursors.DictCursor)
    cursor.execute("""
        SELECT id, name
        FROM f_polic_file_config
        WHERE tenant_id = %s
    """, (TENANT_ID,))
    rows = cursor.fetchall()
    cursor.close()
    template_map = {}
    for row in rows:
        template_name, template_id = row['name'], row['id']
        template_map[template_name] = template_id
        # Allow lookups by file name as well (e.g. "foo.docx").
        if not template_name.endswith('.docx'):
            template_map[template_name + '.docx'] = template_id
    return template_map
def normalize_template_name(file_name: str) -> str:
    """Normalize a template file name for matching against DB template names.

    Only a *trailing* ``.docx`` extension is stripped. The previous
    ``str.replace('.docx', '')`` also removed the substring anywhere in the
    name (e.g. ``"a.docx.v2.docx"`` became ``"a.v2"``), corrupting names
    that legitimately contain ``.docx`` mid-string.

    Args:
        file_name: File name, e.g. ``"report.docx"``.

    Returns:
        The name without its ``.docx`` suffix (unchanged if none).
    """
    if file_name.endswith('.docx'):
        return file_name[:-len('.docx')]
    return file_name
def update_template_field_relations(conn, template_id: int, field_codes: List[str], field_map: Dict[str, Dict], dry_run: bool = True):
    """
    Synchronize template->field relations (f_polic_file_field) so that the
    active (state=1) rows for ``template_id`` match ``field_codes``.

    Removal is a soft delete (state set to 0); re-adding a field re-activates
    an existing soft-deleted row instead of inserting a duplicate.

    Args:
        conn: Open pymysql connection; committed on success, rolled back on error.
        template_id: Template ID (f_polic_file_config.id).
        field_codes: Field codes extracted as placeholders from the template document.
        field_map: Mapping filed_code -> field info, as built by get_field_mapping().
        dry_run: If True, only print the planned changes; do not write to the DB.
    """
    cursor = conn.cursor()
    try:
        # Resolve field codes to field IDs.
        field_ids = []
        not_found_codes = []
        for field_code in field_codes:
            if field_code in field_map:
                field_info = field_map[field_code]
                # Only use enabled fields (state=1).
                # NOTE(review): fields that exist but are disabled are silently
                # skipped here and are NOT reported in not_found_codes — confirm
                # this is intended.
                if field_info['state'] == 1:
                    field_ids.append(field_info['id'])
            else:
                not_found_codes.append(field_code)
        if not_found_codes:
            print(f" 警告: 以下字段编码在数据库中不存在: {not_found_codes}")
        if not field_ids:
            print(f" 警告: 没有找到有效的字段关联")
            return
        # Load current relations for this template (all states; only rows with
        # state=1 count as "currently linked").
        cursor.execute("""
            SELECT filed_id, state
            FROM f_polic_file_field
            WHERE tenant_id = %s AND file_id = %s
        """, (TENANT_ID, template_id))
        all_relations = cursor.fetchall()
        # Collect field IDs whose relation is active (state=1).
        current_field_ids = set()
        for row in all_relations:
            state = row[1]
            # state may come back as bytes (bit-like column) — normalize to int.
            if isinstance(state, bytes):
                state = int.from_bytes(state, byteorder='big') if len(state) == 1 else 0
            elif state is None:
                state = 0
            else:
                try:
                    state = int(state)
                except:
                    state = 0
            if state == 1:
                current_field_ids.add(row[0])
        print(f" 当前关联关系数: {len(current_field_ids)} (期望: {len(field_ids)})")
        # Diff the desired field set against the currently active set.
        new_field_ids = set(field_ids)
        to_add = new_field_ids - current_field_ids
        to_remove = current_field_ids - new_field_ids
        if not to_add and not to_remove:
            print(f" 无需更新,关联关系已是最新")
            return
        if dry_run:
            # Preview only: report the planned diff and return without writing.
            print(f" [预览] 将添加 {len(to_add)} 个关联,删除 {len(to_remove)} 个关联")
            if to_add:
                print(f" 添加: {sorted(list(to_add))[:5]}{'...' if len(to_add) > 5 else ''}")
            if to_remove:
                print(f" 删除: {sorted(list(to_remove))[:5]}{'...' if len(to_remove) > 5 else ''}")
            return
        # Soft-delete relations that should no longer be active (set state=0).
        if to_remove:
            placeholders = ','.join(['%s'] * len(to_remove))
            cursor.execute(f"""
                UPDATE f_polic_file_field
                SET state = 0, updated_time = %s, updated_by = %s
                WHERE tenant_id = %s AND file_id = %s AND filed_id IN ({placeholders})
            """, [CURRENT_TIME, UPDATED_BY, TENANT_ID, template_id] + list(to_remove))
            print(f" 已删除 {len(to_remove)} 个关联关系")
        # Add new relations: re-activate soft-deleted rows, insert otherwise.
        if to_add:
            added_count = 0
            updated_count = 0
            for field_id in to_add:
                # Check whether a row already exists (possibly with state=0).
                cursor.execute("""
                    SELECT id, state FROM f_polic_file_field
                    WHERE tenant_id = %s AND file_id = %s AND filed_id = %s
                """, (TENANT_ID, template_id, field_id))
                existing = cursor.fetchone()
                if existing:
                    # Row exists — re-activate it (state=1) and stamp the update.
                    cursor.execute("""
                        UPDATE f_polic_file_field
                        SET state = 1, updated_time = %s, updated_by = %s
                        WHERE tenant_id = %s AND file_id = %s AND filed_id = %s
                    """, (CURRENT_TIME, UPDATED_BY, TENANT_ID, template_id, field_id))
                    updated_count += 1
                else:
                    # No row yet — insert a fresh active relation.
                    insert_sql = """
                        INSERT INTO f_polic_file_field
                        (tenant_id, file_id, filed_id, created_time, created_by, updated_time, updated_by, state)
                        VALUES (%s, %s, %s, %s, %s, %s, %s, 1)
                    """
                    cursor.execute(insert_sql, (
                        TENANT_ID, template_id, field_id,
                        CURRENT_TIME, CREATED_BY, CURRENT_TIME, CREATED_BY
                    ))
                    added_count += 1
            print(f" 已添加 {added_count} 个新关联,更新 {updated_count} 个现有关联")
        conn.commit()
        print(f" [OK] 更新成功: 添加 {len(to_add)} 个,删除 {len(to_remove)}")
    except Exception as e:
        # Roll back the whole batch on any failure, then propagate.
        conn.rollback()
        raise e
    finally:
        cursor.close()
def scan_and_update_templates(dry_run: bool = True):
    """
    Scan the template directory, extract placeholders from each .docx file,
    and update the template->field relations in the database accordingly.

    For every template, the linked field set becomes: the placeholders found
    in the document (resolved via f_polic_field) plus the two required input
    fields 'clue_info' and 'target_basic_info_clue'.

    Args:
        dry_run: If True, only preview the planned changes (no DB writes).
    """
    print("=" * 80)
    print("扫描模板文档并更新关联关系")
    print("=" * 80)
    print(f"模板目录: {TEMPLATE_DIR}")
    print(f"租户ID: {TENANT_ID}")
    print(f"模式: {'预览模式(不会实际更新数据库)' if dry_run else '更新模式(会更新数据库)'}")
    print()
    # Open the database connection (closed in the finally block below).
    conn = pymysql.connect(**DB_CONFIG)
    try:
        # Load the field and template lookup tables once up front.
        print("加载数据库数据...")
        field_map = get_field_mapping(conn)
        template_map = get_template_mapping(conn)
        print(f" 字段总数: {len(field_map)}")
        print(f" 模板总数: {len(template_map)}")
        print()
        # Scan the template directory recursively for .docx files.
        template_path = Path(TEMPLATE_DIR)
        if not template_path.exists():
            print(f"错误: 模板目录不存在: {TEMPLATE_DIR}")
            return
        docx_files = list(template_path.rglob("*.docx"))
        # Skip Word lock/temp files ("~$...").
        docx_files = [f for f in docx_files if not f.name.startswith("~$")]
        print(f"找到 {len(docx_files)} 个模板文件")
        print()
        # Counters for the final summary.
        processed_count = 0
        updated_count = 0
        not_found_count = 0
        error_count = 0
        # Process each template file in sorted order for stable output.
        for docx_file in sorted(docx_files):
            processed_count += 1
            relative_path = docx_file.relative_to(template_path)
            template_name = normalize_template_name(docx_file.name)
            print(f"[{processed_count}/{len(docx_files)}] {relative_path}")
            print(f" 模板名称: {template_name}")
            # Look up the template ID: first by normalized name, then by raw
            # file name; skip files with no matching DB configuration.
            template_id = None
            if template_name in template_map:
                template_id = template_map[template_name]
            elif docx_file.name in template_map:
                template_id = template_map[docx_file.name]
            else:
                print(f" [ERROR] 未找到对应的模板配置")
                not_found_count += 1
                print()
                continue
            print(f" 模板ID: {template_id}")
            try:
                # Extract placeholder codes from the document body/tables.
                placeholders = extract_placeholders_from_docx(str(docx_file))
                print(f" 占位符数量: {len(placeholders)}")
                if not placeholders:
                    print(f" [WARN] 未找到占位符")
                    print()
                    continue
                # Split placeholders into input fields (field_type=1) and
                # output fields (field_type=2); unknown codes go to a third list.
                input_fields = []
                output_fields = []
                not_found_fields = []
                for placeholder in placeholders:
                    if placeholder in field_map:
                        field_info = field_map[placeholder]
                        if field_info['field_type'] == 1:
                            input_fields.append(placeholder)
                        elif field_info['field_type'] == 2:
                            output_fields.append(placeholder)
                    else:
                        not_found_fields.append(placeholder)
                # Every template must be linked to these two input fields even
                # when the document itself contains no matching placeholders:
                # clue_info (线索信息) and target_basic_info_clue (被核查人员工作基本情况线索).
                required_input_fields = ['clue_info', 'target_basic_info_clue']
                for req_field in required_input_fields:
                    if req_field in field_map:
                        field_info = field_map[req_field]
                        # Only add the required field when it is enabled (state=1).
                        if field_info['state'] == 1 and req_field not in input_fields:
                            input_fields.append(req_field)
                print(f" 输入字段: {len(input_fields)} (包含必需字段), 输出字段: {len(output_fields)}")
                if input_fields:
                    print(f" 输入字段编码: {input_fields}")
                if output_fields:
                    print(f" 输出字段编码: {output_fields[:10]}{'...' if len(output_fields) > 10 else ''}")
                if not_found_fields:
                    print(f" 未找到的字段编码: {not_found_fields[:5]}{'...' if len(not_found_fields) > 5 else ''}")
                # Combine input + output field codes for the relation update.
                all_field_codes = input_fields + output_fields
                print(f" 更新关联关系...")
                update_template_field_relations(
                    conn, template_id, all_field_codes, field_map, dry_run=dry_run
                )
                updated_count += 1
                print()
            except Exception as e:
                # A failure on one file is reported but does not abort the scan.
                print(f" [ERROR] 处理失败: {str(e)}")
                import traceback
                traceback.print_exc()
                error_count += 1
                print()
        # Final summary.
        print("=" * 80)
        print("处理完成")
        print("=" * 80)
        print(f"总文件数: {len(docx_files)}")
        print(f"处理成功: {updated_count}")
        print(f"未找到模板: {not_found_count}")
        print(f"处理失败: {error_count}")
        if dry_run:
            print()
            print("注意: 这是预览模式,未实际更新数据库")
            print("要实际更新,请运行: python update_template_field_relations_from_docx.py --update")
    finally:
        conn.close()
if __name__ == '__main__':
    import sys

    # Writing to the database requires the explicit --update flag;
    # without it the script runs in preview (dry-run) mode.
    preview_mode = '--update' not in sys.argv
    scan_and_update_templates(dry_run=preview_mode)

View File

@ -1,796 +0,0 @@
@Switch01
A_Rog
Aakanksha Agrawal
Abhinav Sagar
ABHYUDAY PRATAP SINGH
abs51295
AceGentile
Adam Chainz
Adam Tse
Adam Wentz
admin
Adolfo Ochagavía
Adrien Morison
Agus
ahayrapetyan
Ahilya
AinsworthK
Akash Srivastava
Alan Yee
Albert Tugushev
Albert-Guan
albertg
Alberto Sottile
Aleks Bunin
Ales Erjavec
Alethea Flowers
Alex Gaynor
Alex Grönholm
Alex Hedges
Alex Loosley
Alex Morega
Alex Stachowiak
Alexander Shtyrov
Alexandre Conrad
Alexey Popravka
Aleš Erjavec
Alli
Ami Fischman
Ananya Maiti
Anatoly Techtonik
Anders Kaseorg
Andre Aguiar
Andreas Lutro
Andrei Geacar
Andrew Gaul
Andrew Shymanel
Andrey Bienkowski
Andrey Bulgakov
Andrés Delfino
Andy Freeland
Andy Kluger
Ani Hayrapetyan
Aniruddha Basak
Anish Tambe
Anrs Hu
Anthony Sottile
Antoine Musso
Anton Ovchinnikov
Anton Patrushev
Antonio Alvarado Hernandez
Antony Lee
Antti Kaihola
Anubhav Patel
Anudit Nagar
Anuj Godase
AQNOUCH Mohammed
AraHaan
arena
arenasys
Arindam Choudhury
Armin Ronacher
Arnon Yaari
Artem
Arun Babu Neelicattu
Ashley Manton
Ashwin Ramaswami
atse
Atsushi Odagiri
Avinash Karhana
Avner Cohen
Awit (Ah-Wit) Ghirmai
Baptiste Mispelon
Barney Gale
barneygale
Bartek Ogryczak
Bastian Venthur
Ben Bodenmiller
Ben Darnell
Ben Hoyt
Ben Mares
Ben Rosser
Bence Nagy
Benjamin Peterson
Benjamin VanEvery
Benoit Pierre
Berker Peksag
Bernard
Bernard Tyers
Bernardo B. Marques
Bernhard M. Wiedemann
Bertil Hatt
Bhavam Vidyarthi
Blazej Michalik
Bogdan Opanchuk
BorisZZZ
Brad Erickson
Bradley Ayers
Branch Vincent
Brandon L. Reiss
Brandt Bucher
Brannon Dorsey
Brett Randall
Brett Rosen
Brian Cristante
Brian Rosner
briantracy
BrownTruck
Bruno Oliveira
Bruno Renié
Bruno S
Bstrdsmkr
Buck Golemon
burrows
Bussonnier Matthias
bwoodsend
c22
Caleb Martinez
Calvin Smith
Carl Meyer
Carlos Liam
Carol Willing
Carter Thayer
Cass
Chandrasekhar Atina
Charlie Marsh
Chih-Hsuan Yen
Chris Brinker
Chris Hunt
Chris Jerdonek
Chris Kuehl
Chris Markiewicz
Chris McDonough
Chris Pawley
Chris Pryer
Chris Wolfe
Christian Clauss
Christian Heimes
Christian Oudard
Christoph Reiter
Christopher Hunt
Christopher Snyder
chrysle
cjc7373
Clark Boylan
Claudio Jolowicz
Clay McClure
Cody
Cody Soyland
Colin Watson
Collin Anderson
Connor Osborn
Cooper Lees
Cooper Ry Lees
Cory Benfield
Cory Wright
Craig Kerstiens
Cristian Sorinel
Cristina
Cristina Muñoz
ctg123
Curtis Doty
cytolentino
Daan De Meyer
Dale
Damian
Damian Quiroga
Damian Shaw
Dan Black
Dan Savilonis
Dan Sully
Dane Hillard
daniel
Daniel Collins
Daniel Hahler
Daniel Holth
Daniel Jost
Daniel Katz
Daniel Shaulov
Daniele Esposti
Daniele Nicolodi
Daniele Procida
Daniil Konovalenko
Danny Hermes
Danny McClanahan
Darren Kavanagh
Dav Clark
Dave Abrahams
Dave Jones
David Aguilar
David Black
David Bordeynik
David Caro
David D Lowe
David Evans
David Hewitt
David Linke
David Poggi
David Poznik
David Pursehouse
David Runge
David Tucker
David Wales
Davidovich
ddelange
Deepak Sharma
Deepyaman Datta
Denise Yu
dependabot[bot]
derwolfe
Desetude
Devesh Kumar Singh
devsagul
Diego Caraballo
Diego Ramirez
DiegoCaraballo
Dimitri Merejkowsky
Dimitri Papadopoulos
Dirk Stolle
Dmitry Gladkov
Dmitry Volodin
Domen Kožar
Dominic Davis-Foster
Donald Stufft
Dongweiming
doron zarhi
Dos Moonen
Douglas Thor
DrFeathers
Dustin Ingram
Dustin Rodrigues
Dwayne Bailey
Ed Morley
Edgar Ramírez
Edgar Ramírez Mondragón
Ee Durbin
Efflam Lemaillet
efflamlemaillet
Eitan Adler
ekristina
elainechan
Eli Schwartz
Elisha Hollander
Ellen Marie Dash
Emil Burzo
Emil Styrke
Emmanuel Arias
Endoh Takanao
enoch
Erdinc Mutlu
Eric Cousineau
Eric Gillingham
Eric Hanchrow
Eric Hopper
Erik M. Bray
Erik Rose
Erwin Janssen
Eugene Vereshchagin
everdimension
Federico
Felipe Peter
Felix Yan
fiber-space
Filip Kokosiński
Filipe Laíns
Finn Womack
finnagin
Flavio Amurrio
Florian Briand
Florian Rathgeber
Francesco
Francesco Montesano
Fredrik Orderud
Frost Ming
Gabriel Curio
Gabriel de Perthuis
Garry Polley
gavin
gdanielson
Geoffrey Sneddon
George Song
Georgi Valkov
Georgy Pchelkin
ghost
Giftlin Rajaiah
gizmoguy1
gkdoc
Godefroid Chapelle
Gopinath M
GOTO Hayato
gousaiyang
gpiks
Greg Roodt
Greg Ward
Guilherme Espada
Guillaume Seguin
gutsytechster
Guy Rozendorn
Guy Tuval
gzpan123
Hanjun Kim
Hari Charan
Harsh Vardhan
harupy
Harutaka Kawamura
hauntsaninja
Henrich Hartzer
Henry Schreiner
Herbert Pfennig
Holly Stotelmyer
Honnix
Hsiaoming Yang
Hugo Lopes Tavares
Hugo van Kemenade
Hugues Bruant
Hynek Schlawack
Ian Bicking
Ian Cordasco
Ian Lee
Ian Stapleton Cordasco
Ian Wienand
Igor Kuzmitshov
Igor Sobreira
Ikko Ashimine
Ilan Schnell
Illia Volochii
Ilya Baryshev
Inada Naoki
Ionel Cristian Mărieș
Ionel Maries Cristian
Itamar Turner-Trauring
Ivan Pozdeev
J. Nick Koston
Jacob Kim
Jacob Walls
Jaime Sanz
jakirkham
Jakub Kuczys
Jakub Stasiak
Jakub Vysoky
Jakub Wilk
James Cleveland
James Curtin
James Firth
James Gerity
James Polley
Jan Pokorný
Jannis Leidel
Jarek Potiuk
jarondl
Jason Curtis
Jason R. Coombs
JasonMo
JasonMo1
Jay Graves
Jean Abou Samra
Jean-Christophe Fillion-Robin
Jeff Barber
Jeff Dairiki
Jeff Widman
Jelmer Vernooij
jenix21
Jeremy Fleischman
Jeremy Stanley
Jeremy Zafran
Jesse Rittner
Jiashuo Li
Jim Fisher
Jim Garrison
Jinzhe Zeng
Jiun Bae
Jivan Amara
Joe Bylund
Joe Michelini
John Paton
John Sirois
John T. Wodder II
John-Scott Atlakson
johnthagen
Jon Banafato
Jon Dufresne
Jon Parise
Jonas Nockert
Jonathan Herbert
Joonatan Partanen
Joost Molenaar
Jorge Niedbalski
Joseph Bylund
Joseph Long
Josh Bronson
Josh Cannon
Josh Hansen
Josh Schneier
Joshua
Juan Luis Cano Rodríguez
Juanjo Bazán
Judah Rand
Julian Berman
Julian Gethmann
Julien Demoor
Jussi Kukkonen
jwg4
Jyrki Pulliainen
Kai Chen
Kai Mueller
Kamal Bin Mustafa
kasium
kaustav haldar
keanemind
Keith Maxwell
Kelsey Hightower
Kenneth Belitzky
Kenneth Reitz
Kevin Burke
Kevin Carter
Kevin Frommelt
Kevin R Patterson
Kexuan Sun
Kit Randel
Klaas van Schelven
KOLANICH
konstin
kpinc
Krishna Oza
Kumar McMillan
Kuntal Majumder
Kurt McKee
Kyle Persohn
lakshmanaram
Laszlo Kiss-Kollar
Laurent Bristiel
Laurent LAPORTE
Laurie O
Laurie Opperman
layday
Leon Sasson
Lev Givon
Lincoln de Sousa
Lipis
lorddavidiii
Loren Carvalho
Lucas Cimon
Ludovic Gasc
Luis Medel
Lukas Geiger
Lukas Juhrich
Luke Macken
Luo Jiebin
luojiebin
luz.paz
László Kiss Kollár
M00nL1ght
Marc Abramowitz
Marc Tamlyn
Marcus Smith
Mariatta
Mark Kohler
Mark McLoughlin
Mark Williams
Markus Hametner
Martey Dodoo
Martin Fischer
Martin Häcker
Martin Pavlasek
Masaki
Masklinn
Matej Stuchlik
Mathew Jennings
Mathieu Bridon
Mathieu Kniewallner
Matt Bacchi
Matt Good
Matt Maker
Matt Robenolt
Matt Wozniski
matthew
Matthew Einhorn
Matthew Feickert
Matthew Gilliard
Matthew Hughes
Matthew Iversen
Matthew Treinish
Matthew Trumbell
Matthew Willson
Matthias Bussonnier
mattip
Maurits van Rees
Max W Chase
Maxim Kurnikov
Maxime Rouyrre
mayeut
mbaluna
mdebi
memoselyk
meowmeowcat
Michael
Michael Aquilina
Michael E. Karpeles
Michael Klich
Michael Mintz
Michael Williamson
michaelpacer
Michał Górny
Mickaël Schoentgen
Miguel Araujo Perez
Mihir Singh
Mike
Mike Hendricks
Min RK
MinRK
Miro Hrončok
Monica Baluna
montefra
Monty Taylor
morotti
mrKazzila
Muha Ajjan
Nadav Wexler
Nahuel Ambrosini
Nate Coraor
Nate Prewitt
Nathan Houghton
Nathaniel J. Smith
Nehal J Wani
Neil Botelho
Nguyễn Gia Phong
Nicholas Serra
Nick Coghlan
Nick Stenning
Nick Timkovich
Nicolas Bock
Nicole Harris
Nikhil Benesch
Nikhil Ladha
Nikita Chepanov
Nikolay Korolev
Nipunn Koorapati
Nitesh Sharma
Niyas Sait
Noah
Noah Gorny
Nowell Strite
NtaleGrey
nvdv
OBITORASU
Ofek Lev
ofrinevo
Oliver Freund
Oliver Jeeves
Oliver Mannion
Oliver Tonnhofer
Olivier Girardot
Olivier Grisel
Ollie Rutherfurd
OMOTO Kenji
Omry Yadan
onlinejudge95
Oren Held
Oscar Benjamin
Oz N Tiram
Pachwenko
Patrick Dubroy
Patrick Jenkins
Patrick Lawson
patricktokeeffe
Patrik Kopkan
Paul Ganssle
Paul Kehrer
Paul Moore
Paul Nasrat
Paul Oswald
Paul van der Linden
Paulus Schoutsen
Pavel Safronov
Pavithra Eswaramoorthy
Pawel Jasinski
Paweł Szramowski
Pekka Klärck
Peter Gessler
Peter Lisák
Peter Shen
Peter Waller
Petr Viktorin
petr-tik
Phaneendra Chiruvella
Phil Elson
Phil Freo
Phil Pennock
Phil Whelan
Philip Jägenstedt
Philip Molloy
Philippe Ombredanne
Pi Delport
Pierre-Yves Rofes
Pieter Degroote
pip
Prabakaran Kumaresshan
Prabhjyotsing Surjit Singh Sodhi
Prabhu Marappan
Pradyun Gedam
Prashant Sharma
Pratik Mallya
pre-commit-ci[bot]
Preet Thakkar
Preston Holmes
Przemek Wrzos
Pulkit Goyal
q0w
Qiangning Hong
Qiming Xu
Quentin Lee
Quentin Pradet
R. David Murray
Rafael Caricio
Ralf Schmitt
Ran Benita
Razzi Abuissa
rdb
Reece Dunham
Remi Rampin
Rene Dudfield
Riccardo Magliocchetti
Riccardo Schirone
Richard Jones
Richard Si
Ricky Ng-Adam
Rishi
rmorotti
RobberPhex
Robert Collins
Robert McGibbon
Robert Pollak
Robert T. McGibbon
robin elisha robinson
Roey Berman
Rohan Jain
Roman Bogorodskiy
Roman Donchenko
Romuald Brunet
ronaudinho
Ronny Pfannschmidt
Rory McCann
Ross Brattain
Roy Wellington Ⅳ
Ruairidh MacLeod
Russell Keith-Magee
Ryan Shepherd
Ryan Wooden
ryneeverett
S. Guliaev
Sachi King
Salvatore Rinchiera
sandeepkiran-js
Sander Van Balen
Savio Jomton
schlamar
Scott Kitterman
Sean
seanj
Sebastian Jordan
Sebastian Schaetz
Segev Finer
SeongSoo Cho
Sergey Vasilyev
Seth Michael Larson
Seth Woodworth
Shahar Epstein
Shantanu
shenxianpeng
shireenrao
Shivansh-007
Shixian Sheng
Shlomi Fish
Shovan Maity
Simeon Visser
Simon Cross
Simon Pichugin
sinoroc
sinscary
snook92
socketubs
Sorin Sbarnea
Srinivas Nyayapati
Stavros Korokithakis
Stefan Scherfke
Stefano Rivera
Stephan Erb
Stephen Rosen
stepshal
Steve (Gadget) Barnes
Steve Barnes
Steve Dower
Steve Kowalik
Steven Myint
Steven Silvester
stonebig
studioj
Stéphane Bidoul
Stéphane Bidoul (ACSONE)
Stéphane Klein
Sumana Harihareswara
Surbhi Sharma
Sviatoslav Sydorenko
Sviatoslav Sydorenko (Святослав Сидоренко)
Swat009
Sylvain
Takayuki SHIMIZUKAWA
Taneli Hukkinen
tbeswick
Thiago
Thijs Triemstra
Thomas Fenzl
Thomas Grainger
Thomas Guettler
Thomas Johansson
Thomas Kluyver
Thomas Smith
Thomas VINCENT
Tim D. Smith
Tim Gates
Tim Harder
Tim Heap
tim smith
tinruufu
Tobias Hermann
Tom Forbes
Tom Freudenheim
Tom V
Tomas Hrnciar
Tomas Orsava
Tomer Chachamu
Tommi Enenkel | AnB
Tomáš Hrnčiar
Tony Beswick
Tony Narlock
Tony Zhaocheng Tan
TonyBeswick
toonarmycaptain
Toshio Kuratomi
toxinu
Travis Swicegood
Tushar Sadhwani
Tzu-ping Chung
Valentin Haenel
Victor Stinner
victorvpaulo
Vikram - Google
Viktor Szépe
Ville Skyttä
Vinay Sajip
Vincent Philippon
Vinicyus Macedo
Vipul Kumar
Vitaly Babiy
Vladimir Fokow
Vladimir Rutsky
W. Trevor King
Wil Tan
Wilfred Hughes
William Edwards
William ML Leslie
William T Olson
William Woodruff
Wilson Mo
wim glenn
Winson Luk
Wolfgang Maier
Wu Zhenyu
XAMES3
Xavier Fernandez
Xianpeng Shen
xoviat
xtreak
YAMAMOTO Takashi
Yen Chi Hsuan
Yeray Diaz Diaz
Yoval P
Yu Jian
Yuan Jing Vincent Yan
Yusuke Hayashi
Zearin
Zhiping Deng
ziebam
Zvezdan Petkovic
Łukasz Langa
Роман Донченко
Семён Марьясин

View File

@ -1,20 +0,0 @@
Copyright (c) 2008-present The pip developers (see AUTHORS.txt file)
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,89 +0,0 @@
Metadata-Version: 2.1
Name: pip
Version: 24.2
Summary: The PyPA recommended tool for installing Python packages.
Author-email: The pip developers <distutils-sig@python.org>
License: MIT
Project-URL: Homepage, https://pip.pypa.io/
Project-URL: Documentation, https://pip.pypa.io
Project-URL: Source, https://github.com/pypa/pip
Project-URL: Changelog, https://pip.pypa.io/en/stable/news/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.8
Description-Content-Type: text/x-rst
License-File: LICENSE.txt
License-File: AUTHORS.txt
pip - The Python Package Installer
==================================
.. |pypi-version| image:: https://img.shields.io/pypi/v/pip.svg
:target: https://pypi.org/project/pip/
:alt: PyPI
.. |python-versions| image:: https://img.shields.io/pypi/pyversions/pip
:target: https://pypi.org/project/pip
:alt: PyPI - Python Version
.. |docs-badge| image:: https://readthedocs.org/projects/pip/badge/?version=latest
:target: https://pip.pypa.io/en/latest
:alt: Documentation
|pypi-version| |python-versions| |docs-badge|
pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
Please take a look at our documentation for how to install and use pip:
* `Installation`_
* `Usage`_
We release updates regularly, with a new version every 3 months. Find more details in our documentation:
* `Release notes`_
* `Release process`_
If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
* `Issue tracking`_
* `Discourse channel`_
* `User IRC`_
If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:
* `GitHub page`_
* `Development documentation`_
* `Development IRC`_
Code of Conduct
---------------
Everyone interacting in the pip project's codebases, issue trackers, chat
rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
.. _package installer: https://packaging.python.org/guides/tool-recommendations/
.. _Python Package Index: https://pypi.org
.. _Installation: https://pip.pypa.io/en/stable/installation/
.. _Usage: https://pip.pypa.io/en/stable/
.. _Release notes: https://pip.pypa.io/en/stable/news.html
.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
.. _GitHub page: https://github.com/pypa/pip
.. _Development documentation: https://pip.pypa.io/en/latest/development
.. _Issue tracking: https://github.com/pypa/pip/issues
.. _Discourse channel: https://discuss.python.org/c/packaging
.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md

View File

@ -1,853 +0,0 @@
../../Scripts/pip.exe,sha256=YnCGQqwt62DfdHh5hEP7Rw7BRRG9Qn33lauj6Rm3u7g,108442
../../Scripts/pip3.12.exe,sha256=YnCGQqwt62DfdHh5hEP7Rw7BRRG9Qn33lauj6Rm3u7g,108442
../../Scripts/pip3.exe,sha256=YnCGQqwt62DfdHh5hEP7Rw7BRRG9Qn33lauj6Rm3u7g,108442
pip-24.2.dist-info/AUTHORS.txt,sha256=KDa8Pd3GDeKSogF6yFW0l9A9eMneLDOFrcIDqkL8G8s,10868
pip-24.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
pip-24.2.dist-info/LICENSE.txt,sha256=Y0MApmnUmurmWxLGxIySTFGkzfPR_whtw0VtyLyqIQQ,1093
pip-24.2.dist-info/METADATA,sha256=PhzCxQxIhsnZ871cPUe3Hew9PhhpgflLbfqU3WizZqM,3624
pip-24.2.dist-info/RECORD,,
pip-24.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip-24.2.dist-info/WHEEL,sha256=Wyh-_nZ0DJYolHNn1_hMa4lM7uDedD_RGVwbmTjyItk,91
pip-24.2.dist-info/entry_points.txt,sha256=eeIjuzfnfR2PrhbjnbzFU6MnSS70kZLxwaHHq6M-bD0,87
pip-24.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
pip/__init__.py,sha256=EQxEGXUQIu-9fNJxVEK74ufx_fTk_HpYV9lAbw-WWbs,355
pip/__main__.py,sha256=WzbhHXTbSE6gBY19mNN9m4s5o_365LOvTYSgqgbdBhE,854
pip/__pip-runner__.py,sha256=cPPWuJ6NK_k-GzfvlejLFgwzmYUROmpAR6QC3Q-vkXQ,1450
pip/__pycache__/__init__.cpython-312.pyc,,
pip/__pycache__/__main__.cpython-312.pyc,,
pip/__pycache__/__pip-runner__.cpython-312.pyc,,
pip/_internal/__init__.py,sha256=MfcoOluDZ8QMCFYal04IqOJ9q6m2V7a0aOsnI-WOxUo,513
pip/_internal/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/__pycache__/build_env.cpython-312.pyc,,
pip/_internal/__pycache__/cache.cpython-312.pyc,,
pip/_internal/__pycache__/configuration.cpython-312.pyc,,
pip/_internal/__pycache__/exceptions.cpython-312.pyc,,
pip/_internal/__pycache__/main.cpython-312.pyc,,
pip/_internal/__pycache__/pyproject.cpython-312.pyc,,
pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc,,
pip/_internal/__pycache__/wheel_builder.cpython-312.pyc,,
pip/_internal/build_env.py,sha256=QiusW8QEaj387y0hdRqVbuelHSHGYcT7WzVckbmMhR0,10420
pip/_internal/cache.py,sha256=Jb698p5PNigRtpW5o26wQNkkUv4MnQ94mc471wL63A0,10369
pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132
pip/_internal/cli/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc,,
pip/_internal/cli/__pycache__/base_command.cpython-312.pyc,,
pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc,,
pip/_internal/cli/__pycache__/command_context.cpython-312.pyc,,
pip/_internal/cli/__pycache__/index_command.cpython-312.pyc,,
pip/_internal/cli/__pycache__/main.cpython-312.pyc,,
pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc,,
pip/_internal/cli/__pycache__/parser.cpython-312.pyc,,
pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc,,
pip/_internal/cli/__pycache__/req_command.cpython-312.pyc,,
pip/_internal/cli/__pycache__/spinners.cpython-312.pyc,,
pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc,,
pip/_internal/cli/autocompletion.py,sha256=Lli3Mr6aDNu7ZkJJFFvwD2-hFxNI6Avz8OwMyS5TVrs,6865
pip/_internal/cli/base_command.py,sha256=F8nUcSM-Y-MQljJUe724-yxmc5viFXHyM_zH70NmIh4,8289
pip/_internal/cli/cmdoptions.py,sha256=mDqBr0d0hoztbRJs-PWtcKpqNAc7khU6ZpoesZKocT8,30110
pip/_internal/cli/command_context.py,sha256=RHgIPwtObh5KhMrd3YZTkl8zbVG-6Okml7YbFX4Ehg0,774
pip/_internal/cli/index_command.py,sha256=YIJ84cfYcbDBACnB8eoDgqjYJU6GpiWP2Rh7Ij-Xyak,5633
pip/_internal/cli/main.py,sha256=BDZef-bWe9g9Jpr4OVs4dDf-845HJsKw835T7AqEnAc,2817
pip/_internal/cli/main_parser.py,sha256=laDpsuBDl6kyfywp9eMMA9s84jfH2TJJn-vmL0GG90w,4338
pip/_internal/cli/parser.py,sha256=QAkY6s8N-AD7w5D2PQm2Y8C2MIJSv7iuAeNjOMvDBUA,10811
pip/_internal/cli/progress_bars.py,sha256=0FAf7eN67KnIv_gZQhTWSnKXXUzQko1ftGXEoLe5Yec,2713
pip/_internal/cli/req_command.py,sha256=DqeFhmUMs6o6Ev8qawAcOoYNdAZsfyKS0MZI5jsJYwQ,12250
pip/_internal/cli/spinners.py,sha256=hIJ83GerdFgFCdobIA23Jggetegl_uC4Sp586nzFbPE,5118
pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116
pip/_internal/commands/__init__.py,sha256=5oRO9O3dM2vGuh0bFw4HOVletryrz5HHMmmPWwJrH9U,3882
pip/_internal/commands/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/commands/__pycache__/cache.cpython-312.pyc,,
pip/_internal/commands/__pycache__/check.cpython-312.pyc,,
pip/_internal/commands/__pycache__/completion.cpython-312.pyc,,
pip/_internal/commands/__pycache__/configuration.cpython-312.pyc,,
pip/_internal/commands/__pycache__/debug.cpython-312.pyc,,
pip/_internal/commands/__pycache__/download.cpython-312.pyc,,
pip/_internal/commands/__pycache__/freeze.cpython-312.pyc,,
pip/_internal/commands/__pycache__/hash.cpython-312.pyc,,
pip/_internal/commands/__pycache__/help.cpython-312.pyc,,
pip/_internal/commands/__pycache__/index.cpython-312.pyc,,
pip/_internal/commands/__pycache__/inspect.cpython-312.pyc,,
pip/_internal/commands/__pycache__/install.cpython-312.pyc,,
pip/_internal/commands/__pycache__/list.cpython-312.pyc,,
pip/_internal/commands/__pycache__/search.cpython-312.pyc,,
pip/_internal/commands/__pycache__/show.cpython-312.pyc,,
pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc,,
pip/_internal/commands/__pycache__/wheel.cpython-312.pyc,,
pip/_internal/commands/cache.py,sha256=xg76_ZFEBC6zoQ3gXLRfMZJft4z2a0RwH4GEFZC6nnU,7944
pip/_internal/commands/check.py,sha256=Hr_4eiMd9cgVDgEvjtIdw915NmL7ROIWW8enkr8slPQ,2268
pip/_internal/commands/completion.py,sha256=HT4lD0bgsflHq2IDgYfiEdp7IGGtE7s6MgI3xn0VQEw,4287
pip/_internal/commands/configuration.py,sha256=n98enwp6y0b5G6fiRQjaZo43FlJKYve_daMhN-4BRNc,9766
pip/_internal/commands/debug.py,sha256=DNDRgE9YsKrbYzU0s3VKi8rHtKF4X13CJ_br_8PUXO0,6797
pip/_internal/commands/download.py,sha256=0qB0nys6ZEPsog451lDsjL5Bx7Z97t-B80oFZKhpzKM,5273
pip/_internal/commands/freeze.py,sha256=2Vt72BYTSm9rzue6d8dNzt8idxWK4Db6Hd-anq7GQ80,3203
pip/_internal/commands/hash.py,sha256=EVVOuvGtoPEdFi8SNnmdqlCQrhCxV-kJsdwtdcCnXGQ,1703
pip/_internal/commands/help.py,sha256=gcc6QDkcgHMOuAn5UxaZwAStsRBrnGSn_yxjS57JIoM,1132
pip/_internal/commands/index.py,sha256=RAXxmJwFhVb5S1BYzb5ifX3sn9Na8v2CCVYwSMP8pao,4731
pip/_internal/commands/inspect.py,sha256=PGrY9TRTRCM3y5Ml8Bdk8DEOXquWRfscr4DRo1LOTPc,3189
pip/_internal/commands/install.py,sha256=iqesiLIZc6Op9uihMQFYRhAA2DQRZUxbM4z1BwXoFls,29428
pip/_internal/commands/list.py,sha256=RgaIV4kN-eMSpgUAXc-6bjnURzl0v3cRE11xr54O9Cg,12771
pip/_internal/commands/search.py,sha256=hSGtIHg26LRe468Ly7oZ6gfd9KbTxBRZAAtJc9Um6S4,5628
pip/_internal/commands/show.py,sha256=IG9L5uo8w6UA4tI_IlmaxLCoNKPa5JNJCljj3NWs0OE,7507
pip/_internal/commands/uninstall.py,sha256=7pOR7enK76gimyxQbzxcG1OsyLXL3DvX939xmM8Fvtg,3892
pip/_internal/commands/wheel.py,sha256=eJRhr_qoNNxWAkkdJCNiQM7CXd4E1_YyQhsqJnBPGGg,6414
pip/_internal/configuration.py,sha256=XkAiBS0hpzsM-LF0Qu5hvPWO_Bs67-oQKRYFBuMbESs,14006
pip/_internal/distributions/__init__.py,sha256=Hq6kt6gXBgjNit5hTTWLAzeCNOKoB-N0pGYSqehrli8,858
pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/distributions/__pycache__/base.cpython-312.pyc,,
pip/_internal/distributions/__pycache__/installed.cpython-312.pyc,,
pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc,,
pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc,,
pip/_internal/distributions/base.py,sha256=QeB9qvKXDIjLdPBDE5fMgpfGqMMCr-govnuoQnGuiF8,1783
pip/_internal/distributions/installed.py,sha256=QinHFbWAQ8oE0pbD8MFZWkwlnfU1QYTccA1vnhrlYOU,842
pip/_internal/distributions/sdist.py,sha256=PlcP4a6-R6c98XnOM-b6Lkb3rsvh9iG4ok8shaanrzs,6751
pip/_internal/distributions/wheel.py,sha256=THBYfnv7VVt8mYhMYUtH13S1E7FDwtDyDfmUcl8ai0E,1317
pip/_internal/exceptions.py,sha256=6qcW3QgmFVlRxlZvDSLUhSzKJ7_Tedo-lyqWA6NfdAU,25371
pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30
pip/_internal/index/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/index/__pycache__/collector.cpython-312.pyc,,
pip/_internal/index/__pycache__/package_finder.cpython-312.pyc,,
pip/_internal/index/__pycache__/sources.cpython-312.pyc,,
pip/_internal/index/collector.py,sha256=RdPO0JLAlmyBWPAWYHPyRoGjz3GNAeTngCNkbGey_mE,16265
pip/_internal/index/package_finder.py,sha256=yRC4xsyudwKnNoU6IXvNoyqYo5ScT7lB6Wa-z2eh7cs,37666
pip/_internal/index/sources.py,sha256=dJegiR9f86kslaAHcv9-R5L_XBf5Rzm_FkyPteDuPxI,8688
pip/_internal/locations/__init__.py,sha256=UaAxeZ_f93FyouuFf4p7SXYF-4WstXuEvd3LbmPCAno,14925
pip/_internal/locations/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc,,
pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc,,
pip/_internal/locations/__pycache__/base.cpython-312.pyc,,
pip/_internal/locations/_distutils.py,sha256=H9ZHK_35rdDV1Qsmi4QeaBULjFT4Mbu6QuoVGkJ6QHI,6009
pip/_internal/locations/_sysconfig.py,sha256=IGzds60qsFneRogC-oeBaY7bEh3lPt_v47kMJChQXsU,7724
pip/_internal/locations/base.py,sha256=RQiPi1d4FVM2Bxk04dQhXZ2PqkeljEL2fZZ9SYqIQ78,2556
pip/_internal/main.py,sha256=r-UnUe8HLo5XFJz8inTcOOTiu_sxNhgHb6VwlGUllOI,340
pip/_internal/metadata/__init__.py,sha256=9pU3W3s-6HtjFuYhWcLTYVmSaziklPv7k2x8p7X1GmA,4339
pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/metadata/__pycache__/_json.cpython-312.pyc,,
pip/_internal/metadata/__pycache__/base.cpython-312.pyc,,
pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc,,
pip/_internal/metadata/_json.py,sha256=P0cAJrH_mtmMZvlZ16ZXm_-izA4lpr5wy08laICuiaA,2644
pip/_internal/metadata/base.py,sha256=ft0K5XNgI4ETqZnRv2-CtvgYiMOMAeGMAzxT-f6VLJA,25298
pip/_internal/metadata/importlib/__init__.py,sha256=jUUidoxnHcfITHHaAWG1G2i5fdBYklv_uJcjo2x7VYE,135
pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc,,
pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc,,
pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc,,
pip/_internal/metadata/importlib/_compat.py,sha256=c6av8sP8BBjAZuFSJow1iWfygUXNM3xRTCn5nqw6B9M,2796
pip/_internal/metadata/importlib/_dists.py,sha256=anh0mLI-FYRPUhAdipd0Va3YJJc6HelCKQ0bFhY10a0,8017
pip/_internal/metadata/importlib/_envs.py,sha256=JHjNfnk9RsjrcQw8dLBqdfBglOKSepEe9aq03B4nRpU,7431
pip/_internal/metadata/pkg_resources.py,sha256=U07ETAINSGeSRBfWUG93E4tZZbaW_f7PGzEqZN0hulc,10542
pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63
pip/_internal/models/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/models/__pycache__/candidate.cpython-312.pyc,,
pip/_internal/models/__pycache__/direct_url.cpython-312.pyc,,
pip/_internal/models/__pycache__/format_control.cpython-312.pyc,,
pip/_internal/models/__pycache__/index.cpython-312.pyc,,
pip/_internal/models/__pycache__/installation_report.cpython-312.pyc,,
pip/_internal/models/__pycache__/link.cpython-312.pyc,,
pip/_internal/models/__pycache__/scheme.cpython-312.pyc,,
pip/_internal/models/__pycache__/search_scope.cpython-312.pyc,,
pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc,,
pip/_internal/models/__pycache__/target_python.cpython-312.pyc,,
pip/_internal/models/__pycache__/wheel.cpython-312.pyc,,
pip/_internal/models/candidate.py,sha256=zzgFRuw_kWPjKpGw7LC0ZUMD2CQ2EberUIYs8izjdCA,753
pip/_internal/models/direct_url.py,sha256=uBtY2HHd3TO9cKQJWh0ThvE5FRr-MWRYChRU4IG9HZE,6578
pip/_internal/models/format_control.py,sha256=wtsQqSK9HaUiNxQEuB-C62eVimw6G4_VQFxV9-_KDBE,2486
pip/_internal/models/index.py,sha256=tYnL8oxGi4aSNWur0mG8DAP7rC6yuha_MwJO8xw0crI,1030
pip/_internal/models/installation_report.py,sha256=zRVZoaz-2vsrezj_H3hLOhMZCK9c7TbzWgC-jOalD00,2818
pip/_internal/models/link.py,sha256=jHax9O-9zlSzEwjBCDkx0OXjKXwBDwOuPwn-PsR8dCs,21034
pip/_internal/models/scheme.py,sha256=PakmHJM3e8OOWSZFtfz1Az7f1meONJnkGuQxFlt3wBE,575
pip/_internal/models/search_scope.py,sha256=67NEnsYY84784S-MM7ekQuo9KXLH-7MzFntXjapvAo0,4531
pip/_internal/models/selection_prefs.py,sha256=qaFfDs3ciqoXPg6xx45N1jPLqccLJw4N0s4P0PyHTQ8,2015
pip/_internal/models/target_python.py,sha256=2XaH2rZ5ZF-K5wcJbEMGEl7SqrTToDDNkrtQ2v_v_-Q,4271
pip/_internal/models/wheel.py,sha256=Odc1NVWL5N-i6A3vFa50BfNvCRlGvGa4som60FQM198,3601
pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50
pip/_internal/network/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/network/__pycache__/auth.cpython-312.pyc,,
pip/_internal/network/__pycache__/cache.cpython-312.pyc,,
pip/_internal/network/__pycache__/download.cpython-312.pyc,,
pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc,,
pip/_internal/network/__pycache__/session.cpython-312.pyc,,
pip/_internal/network/__pycache__/utils.cpython-312.pyc,,
pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc,,
pip/_internal/network/auth.py,sha256=D4gASjUrqoDFlSt6gQ767KAAjv6PUyJU0puDlhXNVRE,20809
pip/_internal/network/cache.py,sha256=48A971qCzKNFvkb57uGEk7-0xaqPS0HWj2711QNTxkU,3935
pip/_internal/network/download.py,sha256=FLOP29dPYECBiAi7eEjvAbNkyzaKNqbyjOT2m8HPW8U,6048
pip/_internal/network/lazy_wheel.py,sha256=2PXVduYZPCPZkkQFe1J1GbfHJWeCU--FXonGyIfw9eU,7638
pip/_internal/network/session.py,sha256=XmanBKjVwPFmh1iJ58q6TDh9xabH37gREuQJ_feuZGA,18741
pip/_internal/network/utils.py,sha256=Inaxel-NxBu4PQWkjyErdnfewsFCcgHph7dzR1-FboY,4088
pip/_internal/network/xmlrpc.py,sha256=sAxzOacJ-N1NXGPvap9jC3zuYWSnnv3GXtgR2-E2APA,1838
pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/operations/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/operations/__pycache__/check.cpython-312.pyc,,
pip/_internal/operations/__pycache__/freeze.cpython-312.pyc,,
pip/_internal/operations/__pycache__/prepare.cpython-312.pyc,,
pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/operations/build/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/operations/build/__pycache__/build_tracker.cpython-312.pyc,,
pip/_internal/operations/build/__pycache__/metadata.cpython-312.pyc,,
pip/_internal/operations/build/__pycache__/metadata_editable.cpython-312.pyc,,
pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-312.pyc,,
pip/_internal/operations/build/__pycache__/wheel.cpython-312.pyc,,
pip/_internal/operations/build/__pycache__/wheel_editable.cpython-312.pyc,,
pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-312.pyc,,
pip/_internal/operations/build/build_tracker.py,sha256=-ARW_TcjHCOX7D2NUOGntB4Fgc6b4aolsXkAK6BWL7w,4774
pip/_internal/operations/build/metadata.py,sha256=9S0CUD8U3QqZeXp-Zyt8HxwU90lE4QrnYDgrqZDzBnc,1422
pip/_internal/operations/build/metadata_editable.py,sha256=VLL7LvntKE8qxdhUdEJhcotFzUsOSI8NNS043xULKew,1474
pip/_internal/operations/build/metadata_legacy.py,sha256=8i6i1QZX9m_lKPStEFsHKM0MT4a-CD408JOw99daLmo,2190
pip/_internal/operations/build/wheel.py,sha256=sT12FBLAxDC6wyrDorh8kvcZ1jG5qInCRWzzP-UkJiQ,1075
pip/_internal/operations/build/wheel_editable.py,sha256=yOtoH6zpAkoKYEUtr8FhzrYnkNHQaQBjWQ2HYae1MQg,1417
pip/_internal/operations/build/wheel_legacy.py,sha256=K-6kNhmj-1xDF45ny1yheMerF0ui4EoQCLzEoHh6-tc,3045
pip/_internal/operations/check.py,sha256=L24vRL8VWbyywdoeAhM89WCd8zLTnjIbULlKelUgIec,5912
pip/_internal/operations/freeze.py,sha256=V59yEyCSz_YhZuhH09-6aV_zvYBMrS_IxFFNqn2QzlA,9864
pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51
pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/operations/install/__pycache__/editable_legacy.cpython-312.pyc,,
pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc,,
pip/_internal/operations/install/editable_legacy.py,sha256=PoEsNEPGbIZ2yQphPsmYTKLOCMs4gv5OcCdzW124NcA,1283
pip/_internal/operations/install/wheel.py,sha256=X5Iz9yUg5LlK5VNQ9g2ikc6dcRu8EPi_SUi5iuEDRgo,27615
pip/_internal/operations/prepare.py,sha256=joWJwPkuqGscQgVNImLK71e9hRapwKvRCM8HclysmvU,28118
pip/_internal/pyproject.py,sha256=rw4fwlptDp1hZgYoplwbAGwWA32sWQkp7ysf8Ju6iXc,7287
pip/_internal/req/__init__.py,sha256=HxBFtZy_BbCclLgr26waMtpzYdO5T3vxePvpGAXSt5s,2653
pip/_internal/req/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/req/__pycache__/constructors.cpython-312.pyc,,
pip/_internal/req/__pycache__/req_file.cpython-312.pyc,,
pip/_internal/req/__pycache__/req_install.cpython-312.pyc,,
pip/_internal/req/__pycache__/req_set.cpython-312.pyc,,
pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc,,
pip/_internal/req/constructors.py,sha256=qXNZtUqhsXpHxkRaIQhp20_Kz6I88MDKM8SQR9fckIc,18424
pip/_internal/req/req_file.py,sha256=hnC9Oz-trqGQpuDnCVWqwpJkAvtbCsk7-5k0EWVQhlQ,17687
pip/_internal/req/req_install.py,sha256=yhT98NGDoAEk03jznTJnYCznzhiMEEA2ocgsUG_dcNU,35788
pip/_internal/req/req_set.py,sha256=j3esG0s6SzoVReX9rWn4rpYNtyET_fwxbwJPRimvRxo,2858
pip/_internal/req/req_uninstall.py,sha256=qzDIxJo-OETWqGais7tSMCDcWbATYABT-Tid3ityF0s,23853
pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/resolution/__pycache__/base.cpython-312.pyc,,
pip/_internal/resolution/base.py,sha256=qlmh325SBVfvG6Me9gc5Nsh5sdwHBwzHBq6aEXtKsLA,583
pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc,,
pip/_internal/resolution/legacy/resolver.py,sha256=3HZiJBRd1FTN6jQpI4qRO8-TbLYeIbUTS6PFvXnXs2w,24068
pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc,,
pip/_internal/resolution/resolvelib/base.py,sha256=DCf669FsqyQY5uqXeePDHQY1e4QO-pBzWH8O0s9-K94,5023
pip/_internal/resolution/resolvelib/candidates.py,sha256=07CBc85ya3J19XqdvUsLQwtVIxiTYq9km9hbTRh0jb0,19823
pip/_internal/resolution/resolvelib/factory.py,sha256=mTTq_nG1F9Eq3VnlYPH6Ap-mydcS-mxC5y5L-CLLp80,32459
pip/_internal/resolution/resolvelib/found_candidates.py,sha256=9hrTyQqFvl9I7Tji79F1AxHv39Qh1rkJ_7deSHSMfQc,6383
pip/_internal/resolution/resolvelib/provider.py,sha256=bcsFnYvlmtB80cwVdW1fIwgol8ZNr1f1VHyRTkz47SM,9935
pip/_internal/resolution/resolvelib/reporter.py,sha256=00JtoXEkTlw0-rl_sl54d71avwOsJHt9GGHcrj5Sza0,3168
pip/_internal/resolution/resolvelib/requirements.py,sha256=7JG4Z72e5Yk4vU0S5ulGvbqTy4FMQGYhY5zQhX9zTtY,8065
pip/_internal/resolution/resolvelib/resolver.py,sha256=nLJOsVMEVi2gQUVJoUFKMZAeu2f7GRMjGMvNSWyz0Bc,12592
pip/_internal/self_outdated_check.py,sha256=pkjQixuWyQ1vrVxZAaYD6SSHgXuFUnHZybXEWTkh0S0,8145
pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/utils/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc,,
pip/_internal/utils/__pycache__/_log.cpython-312.pyc,,
pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc,,
pip/_internal/utils/__pycache__/compat.cpython-312.pyc,,
pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc,,
pip/_internal/utils/__pycache__/datetime.cpython-312.pyc,,
pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc,,
pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc,,
pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc,,
pip/_internal/utils/__pycache__/encoding.cpython-312.pyc,,
pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc,,
pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc,,
pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc,,
pip/_internal/utils/__pycache__/glibc.cpython-312.pyc,,
pip/_internal/utils/__pycache__/hashes.cpython-312.pyc,,
pip/_internal/utils/__pycache__/logging.cpython-312.pyc,,
pip/_internal/utils/__pycache__/misc.cpython-312.pyc,,
pip/_internal/utils/__pycache__/packaging.cpython-312.pyc,,
pip/_internal/utils/__pycache__/retry.cpython-312.pyc,,
pip/_internal/utils/__pycache__/setuptools_build.cpython-312.pyc,,
pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc,,
pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc,,
pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc,,
pip/_internal/utils/__pycache__/urls.cpython-312.pyc,,
pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc,,
pip/_internal/utils/__pycache__/wheel.cpython-312.pyc,,
pip/_internal/utils/_jaraco_text.py,sha256=M15uUPIh5NpP1tdUGBxRau6q1ZAEtI8-XyLEETscFfE,3350
pip/_internal/utils/_log.py,sha256=-jHLOE_THaZz5BFcCnoSL9EYAtJ0nXem49s9of4jvKw,1015
pip/_internal/utils/appdirs.py,sha256=swgcTKOm3daLeXTW6v5BUS2Ti2RvEnGRQYH_yDXklAo,1665
pip/_internal/utils/compat.py,sha256=ckkFveBiYQjRWjkNsajt_oWPS57tJvE8XxoC4OIYgCY,2399
pip/_internal/utils/compatibility_tags.py,sha256=ydin8QG8BHqYRsPY4OL6cmb44CbqXl1T0xxS97VhHkk,5377
pip/_internal/utils/datetime.py,sha256=m21Y3wAtQc-ji6Veb6k_M5g6A0ZyFI4egchTdnwh-pQ,242
pip/_internal/utils/deprecation.py,sha256=k7Qg_UBAaaTdyq82YVARA6D7RmcGTXGv7fnfcgigj4Q,3707
pip/_internal/utils/direct_url_helpers.py,sha256=r2MRtkVDACv9AGqYODBUC9CjwgtsUU1s68hmgfCJMtA,3196
pip/_internal/utils/egg_link.py,sha256=0FePZoUYKv4RGQ2t6x7w5Z427wbA_Uo3WZnAkrgsuqo,2463
pip/_internal/utils/encoding.py,sha256=qqsXDtiwMIjXMEiIVSaOjwH5YmirCaK-dIzb6-XJsL0,1169
pip/_internal/utils/entrypoints.py,sha256=YlhLTRl2oHBAuqhc-zmL7USS67TPWVHImjeAQHreZTQ,3064
pip/_internal/utils/filesystem.py,sha256=ajvA-q4ocliW9kPp8Yquh-4vssXbu-UKbo5FV9V4X64,4950
pip/_internal/utils/filetypes.py,sha256=i8XAQ0eFCog26Fw9yV0Yb1ygAqKYB1w9Cz9n0fj8gZU,716
pip/_internal/utils/glibc.py,sha256=vUkWq_1pJuzcYNcGKLlQmABoUiisK8noYY1yc8Wq4w4,3734
pip/_internal/utils/hashes.py,sha256=XGGLL0AG8-RhWnyz87xF6MFZ--BKadHU35D47eApCKI,4972
pip/_internal/utils/logging.py,sha256=7BFKB1uFjdxD5crM-GtwA5T2qjbQ2LPD-gJDuJeDNTg,11606
pip/_internal/utils/misc.py,sha256=HR_V97vNTHNzwq01JrnTZtsLLkWAOJ9_EeYfHJZSgDY,23745
pip/_internal/utils/packaging.py,sha256=iI3LH43lVNR4hWBOqF6lFsZq4aycb2j0UcHlmDmcqUg,2109
pip/_internal/utils/retry.py,sha256=mhFbykXjhTnZfgzeuy-vl9c8nECnYn_CMtwNJX2tYzQ,1392
pip/_internal/utils/setuptools_build.py,sha256=ouXpud-jeS8xPyTPsXJ-m34NPvK5os45otAzdSV_IJE,4435
pip/_internal/utils/subprocess.py,sha256=EsvqSRiSMHF98T8Txmu6NLU3U--MpTTQjtNgKP0P--M,8988
pip/_internal/utils/temp_dir.py,sha256=5qOXe8M4JeY6vaFQM867d5zkp1bSwMZ-KT5jymmP0Zg,9310
pip/_internal/utils/unpacking.py,sha256=eyDkSsk4nW8ZfiSjNzJduCznpHyaGHVv3ak_LMGsiEM,11951
pip/_internal/utils/urls.py,sha256=qceSOZb5lbNDrHNsv7_S4L4Ytszja5NwPKUMnZHbYnM,1599
pip/_internal/utils/virtualenv.py,sha256=S6f7csYorRpiD6cvn3jISZYc3I8PJC43H5iMFpRAEDU,3456
pip/_internal/utils/wheel.py,sha256=b442jkydFHjXzDy6cMR7MpzWBJ1Q82hR5F33cmcHV3g,4494
pip/_internal/vcs/__init__.py,sha256=UAqvzpbi0VbZo3Ub6skEeZAw-ooIZR-zX_WpCbxyCoU,596
pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc,,
pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc,,
pip/_internal/vcs/__pycache__/git.cpython-312.pyc,,
pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc,,
pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc,,
pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc,,
pip/_internal/vcs/bazaar.py,sha256=EKStcQaKpNu0NK4p5Q10Oc4xb3DUxFw024XrJy40bFQ,3528
pip/_internal/vcs/git.py,sha256=3tpc9LQA_J4IVW5r5NvWaaSeDzcmJOrSFZN0J8vIKfU,18177
pip/_internal/vcs/mercurial.py,sha256=oULOhzJ2Uie-06d1omkL-_Gc6meGaUkyogvqG9ZCyPs,5249
pip/_internal/vcs/subversion.py,sha256=ddTugHBqHzV3ebKlU5QXHPN4gUqlyXbOx8q8NgXKvs8,11735
pip/_internal/vcs/versioncontrol.py,sha256=cvf_-hnTAjQLXJ3d17FMNhQfcO1AcKWUF10tfrYyP-c,22440
pip/_internal/wheel_builder.py,sha256=DL3A8LKeRj_ACp11WS5wSgASgPFqeyAeXJKdXfmaWXU,11799
pip/_vendor/__init__.py,sha256=JYuAXvClhInxIrA2FTp5p-uuWVL7WV6-vEpTs46-Qh4,4873
pip/_vendor/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/__pycache__/typing_extensions.cpython-312.pyc,,
pip/_vendor/cachecontrol/__init__.py,sha256=GiYoagwPEiJ_xR_lbwWGaoCiPtF_rz4isjfjdDAgHU4,676
pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc,,
pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc,,
pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc,,
pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc,,
pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc,,
pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc,,
pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc,,
pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc,,
pip/_vendor/cachecontrol/_cmd.py,sha256=iist2EpzJvDVIhMAxXq8iFnTBsiZAd6iplxfmNboNyk,1737
pip/_vendor/cachecontrol/adapter.py,sha256=fByO_Pd_EOemjWbuocvBWdN85xT0q_TBm2lxS6vD4fk,6355
pip/_vendor/cachecontrol/cache.py,sha256=OTQj72tUf8C1uEgczdl3Gc8vkldSzsTITKtDGKMx4z8,1952
pip/_vendor/cachecontrol/caches/__init__.py,sha256=dtrrroK5BnADR1GWjCZ19aZ0tFsMfvFBtLQQU1sp_ag,303
pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc,,
pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc,,
pip/_vendor/cachecontrol/caches/file_cache.py,sha256=9AlmmTJc6cslb6k5z_6q0sGPHVrMj8zv-uWy-simmfE,5406
pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=9rmqwtYu_ljVkW6_oLqbC7EaX_a8YT_yLuna-eS0dgo,1386
pip/_vendor/cachecontrol/controller.py,sha256=o-ejGJlBmpKK8QQLyTPJj0t7siU8XVHXuV8MCybCxQ8,18575
pip/_vendor/cachecontrol/filewrapper.py,sha256=STttGmIPBvZzt2b51dUOwoWX5crcMCpKZOisM3f5BNc,4292
pip/_vendor/cachecontrol/heuristics.py,sha256=IYe4QmHERWsMvtxNrp920WeaIsaTTyqLB14DSheSbtY,4834
pip/_vendor/cachecontrol/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/cachecontrol/serialize.py,sha256=HQd2IllQ05HzPkVLMXTF2uX5mjEQjDBkxCqUJUODpZk,5163
pip/_vendor/cachecontrol/wrapper.py,sha256=hsGc7g8QGQTT-4f8tgz3AM5qwScg6FO0BSdLSRdEvpU,1417
pip/_vendor/certifi/__init__.py,sha256=LHXz7E80YJYBzCBv6ZyidQ5-ciYSkSebpY2E5OM0l7o,94
pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255
pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc,,
pip/_vendor/certifi/__pycache__/core.cpython-312.pyc,,
pip/_vendor/certifi/cacert.pem,sha256=SIupYGAr8HzGP073rsEIaS_sQYIPwzKKjj894DgUmu4,291528
pip/_vendor/certifi/core.py,sha256=2SRT5rIcQChFDbe37BQa-kULxAgJ8qN6l1jfqTp4HIs,4486
pip/_vendor/certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/distlib/__init__.py,sha256=hJKF7FHoqbmGckncDuEINWo_OYkDNiHODtYXSMcvjcc,625
pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/database.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/index.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/locators.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/manifest.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/markers.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/metadata.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/util.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/version.cpython-312.pyc,,
pip/_vendor/distlib/__pycache__/wheel.cpython-312.pyc,,
pip/_vendor/distlib/compat.py,sha256=Un-uIBvy02w-D267OG4VEhuddqWgKj9nNkxVltAb75w,41487
pip/_vendor/distlib/database.py,sha256=0V9Qvs0Vrxa2F_-hLWitIyVyRifJ0pCxyOI-kEOBwsA,51965
pip/_vendor/distlib/index.py,sha256=lTbw268rRhj8dw1sib3VZ_0EhSGgoJO3FKJzSFMOaeA,20797
pip/_vendor/distlib/locators.py,sha256=o1r_M86_bRLafSpetmyfX8KRtFu-_Q58abvQrnOSnbA,51767
pip/_vendor/distlib/manifest.py,sha256=3qfmAmVwxRqU1o23AlfXrQGZzh6g_GGzTAP_Hb9C5zQ,14168
pip/_vendor/distlib/markers.py,sha256=n3DfOh1yvZ_8EW7atMyoYeZFXjYla0Nz0itQlojCd0A,5268
pip/_vendor/distlib/metadata.py,sha256=pB9WZ9mBfmQxc9OVIldLS5CjOoQRvKAvUwwQyKwKQtQ,39693
pip/_vendor/distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820
pip/_vendor/distlib/scripts.py,sha256=8_gP9J7_tlNRicnWmPX4ZiDlP5wTwJKDeeg-8_qXUZU,18780
pip/_vendor/distlib/t32.exe,sha256=a0GV5kCoWsMutvliiCKmIgV98eRZ33wXoS-XrqvJQVs,97792
pip/_vendor/distlib/t64-arm.exe,sha256=68TAa32V504xVBnufojh0PcenpR3U4wAqTqf-MZqbPw,182784
pip/_vendor/distlib/t64.exe,sha256=gaYY8hy4fbkHYTTnA4i26ct8IQZzkBG2pRdy0iyuBrc,108032
pip/_vendor/distlib/util.py,sha256=XSznxEi_i3T20UJuaVc0qXHz5ksGUCW1khYlBprN_QE,67530
pip/_vendor/distlib/version.py,sha256=9pXkduchve_aN7JG6iL9VTYV_kqNSGoc2Dwl8JuySnQ,23747
pip/_vendor/distlib/w32.exe,sha256=R4csx3-OGM9kL4aPIzQKRo5TfmRSHZo6QWyLhDhNBks,91648
pip/_vendor/distlib/w64-arm.exe,sha256=xdyYhKj0WDcVUOCb05blQYvzdYIKMbmJn2SZvzkcey4,168448
pip/_vendor/distlib/w64.exe,sha256=ejGf-rojoBfXseGLpya6bFTFPWRG21X5KvU8J5iU-K0,101888
pip/_vendor/distlib/wheel.py,sha256=FVQCve8u-L0QYk5-YTZc7s4WmNQdvjRWTK08KXzZVX4,43958
pip/_vendor/distro/__init__.py,sha256=2fHjF-SfgPvjyNZ1iHh_wjqWdR_Yo5ODHwZC0jLBPhc,981
pip/_vendor/distro/__main__.py,sha256=bu9d3TifoKciZFcqRBuygV3GSuThnVD_m2IK4cz96Vs,64
pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc,,
pip/_vendor/distro/__pycache__/distro.cpython-312.pyc,,
pip/_vendor/distro/distro.py,sha256=XqbefacAhDT4zr_trnbA15eY8vdK4GTghgmvUGrEM_4,49430
pip/_vendor/distro/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/idna/__pycache__/codec.cpython-312.pyc,,
pip/_vendor/idna/__pycache__/compat.cpython-312.pyc,,
pip/_vendor/idna/__pycache__/core.cpython-312.pyc,,
pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc,,
pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc,,
pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc,,
pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc,,
pip/_vendor/idna/codec.py,sha256=PS6m-XmdST7Wj7J7ulRMakPDt5EBJyYrT3CPtjh-7t4,3426
pip/_vendor/idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321
pip/_vendor/idna/core.py,sha256=lyhpoe2vulEaB_65xhXmoKgO-xUqFDvcwxu5hpNNO4E,12663
pip/_vendor/idna/idnadata.py,sha256=dqRwytzkjIHMBa2R1lYvHDwACenZPt8eGVu1Y8UBE-E,78320
pip/_vendor/idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881
pip/_vendor/idna/package_data.py,sha256=Tkt0KnIeyIlnHddOaz9WSkkislNgokJAuE-p5GorMqo,21
pip/_vendor/idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/idna/uts46data.py,sha256=1KuksWqLuccPXm2uyRVkhfiFLNIhM_H2m4azCcnOqEU,206503
pip/_vendor/msgpack/__init__.py,sha256=gsMP7JTECZNUSjvOyIbdhNOkpB9Z8BcGwabVGY2UcdQ,1077
pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc,,
pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc,,
pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc,,
pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081
pip/_vendor/msgpack/ext.py,sha256=fKp00BqDLjUtZnPd70Llr138zk8JsCuSpJkkZ5S4dt8,5629
pip/_vendor/msgpack/fallback.py,sha256=wdUWJkWX2gzfRW9BBCTOuIE1Wvrf5PtBtR8ZtY7G_EE,33175
pip/_vendor/packaging/__init__.py,sha256=dtw2bNmWCQ9WnMoK3bk_elL1svSlikXtLpZhCFIB9SE,496
pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/_elffile.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/_parser.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/_tokenizer.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/metadata.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc,,
pip/_vendor/packaging/__pycache__/version.cpython-312.pyc,,
pip/_vendor/packaging/_elffile.py,sha256=_LcJW4YNKywYsl4169B2ukKRqwxjxst_8H0FRVQKlz8,3282
pip/_vendor/packaging/_manylinux.py,sha256=Xo4V0PZz8sbuVCbTni0t1CR0AHeir_7ib4lTmV8scD4,9586
pip/_vendor/packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
pip/_vendor/packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
pip/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
pip/_vendor/packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
pip/_vendor/packaging/markers.py,sha256=dWKSqn5Sp-jDmOG-W3GfLHKjwhf1IsznbT71VlBoB5M,10671
pip/_vendor/packaging/metadata.py,sha256=KINuSkJ12u-SyoKNTy_pHNGAfMUtxNvZ53qA1zAKcKI,32349
pip/_vendor/packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
pip/_vendor/packaging/specifiers.py,sha256=HfGgfNJRvrzC759gnnoojHyiWs_DYmcw5PEh5jHH-YE,39738
pip/_vendor/packaging/tags.py,sha256=y8EbheOu9WS7s-MebaXMcHMF-jzsA_C1Lz5XRTiSy4w,18883
pip/_vendor/packaging/utils.py,sha256=NAdYUwnlAOpkat_RthavX8a07YuVxgGL_vwrx73GSDM,5287
pip/_vendor/packaging/version.py,sha256=wE4sSVlF-d1H6HFC1vszEe35CwTig_fh4HHIFg95hFE,16210
pip/_vendor/pkg_resources/__init__.py,sha256=jrhDRbOubP74QuPXxd7U7Po42PH2l-LZ2XfcO7llpZ4,124463
pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/platformdirs/__init__.py,sha256=FTA6LGNm40GwNZt3gG3uLAacWvf2E_2HTmH0rAALGR8,22285
pip/_vendor/platformdirs/__main__.py,sha256=jBJ8zb7Mpx5ebcqF83xrpO94MaeCpNGHVf9cvDN2JLg,1505
pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc,,
pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc,,
pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc,,
pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc,,
pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc,,
pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc,,
pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc,,
pip/_vendor/platformdirs/android.py,sha256=xZXY9Jd46WOsxT2U6-5HsNtDZ-IQqxcEUrBLl3hYk4o,9016
pip/_vendor/platformdirs/api.py,sha256=QBYdUac2eC521ek_y53uD1Dcq-lJX8IgSRVd4InC6uc,8996
pip/_vendor/platformdirs/macos.py,sha256=wftsbsvq6nZ0WORXSiCrZNkRHz_WKuktl0a6mC7MFkI,5580
pip/_vendor/platformdirs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/platformdirs/unix.py,sha256=Cci9Wqt35dAMsg6HT9nRGHSBW5obb0pR3AE1JJnsCXg,10643
pip/_vendor/platformdirs/version.py,sha256=r7F76tZRjgQKzrpx_I0_ZMQOMU-PS7eGnHD7zEK3KB0,411
pip/_vendor/platformdirs/windows.py,sha256=IFpiohUBwxPtCzlyKwNtxyW4Jk8haa6W8o59mfrDXVo,10125
pip/_vendor/pygments/__init__.py,sha256=7N1oiaWulw_nCsTY4EEixYLz15pWY5u4uPAFFi-ielU,2983
pip/_vendor/pygments/__main__.py,sha256=isIhBxLg65nLlXukG4VkMuPfNdd7gFzTZ_R_z3Q8diY,353
pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/cmdline.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/console.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/style.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/token.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc,,
pip/_vendor/pygments/__pycache__/util.cpython-312.pyc,,
pip/_vendor/pygments/cmdline.py,sha256=LIVzmAunlk9sRJJp54O4KRy9GDIN4Wu13v9p9QzfGPM,23656
pip/_vendor/pygments/console.py,sha256=yhP9UsLAVmWKVQf2446JJewkA7AiXeeTf4Ieg3Oi2fU,1718
pip/_vendor/pygments/filter.py,sha256=_ADNPCskD8_GmodHi6_LoVgPU3Zh336aBCT5cOeTMs0,1910
pip/_vendor/pygments/filters/__init__.py,sha256=RdedK2KWKXlKwR7cvkfr3NUj9YiZQgMgilRMFUg2jPA,40392
pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/pygments/formatter.py,sha256=jDWBTndlBH2Z5IYZFVDnP0qn1CaTQjTWt7iAGtCnJEg,4390
pip/_vendor/pygments/formatters/__init__.py,sha256=8No-NUs8rBTSSBJIv4hSEQt2M0cFB4hwAT0snVc2QGE,5385
pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/groff.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/html.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/img.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/irc.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/latex.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/other.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/svg.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-312.pyc,,
pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-312.pyc,,
pip/_vendor/pygments/formatters/_mapping.py,sha256=1Cw37FuQlNacnxRKmtlPX4nyLoX9_ttko5ZwscNUZZ4,4176
pip/_vendor/pygments/formatters/bbcode.py,sha256=3JQLI45tcrQ_kRUMjuab6C7Hb0XUsbVWqqbSn9cMjkI,3320
pip/_vendor/pygments/formatters/groff.py,sha256=M39k0PaSSZRnxWjqBSVPkF0mu1-Vr7bm6RsFvs-CNN4,5106
pip/_vendor/pygments/formatters/html.py,sha256=SE2jc3YCqbMS3rZW9EAmDlAUhdVxJ52gA4dileEvCGU,35669
pip/_vendor/pygments/formatters/img.py,sha256=MwA4xWPLOwh6j7Yc6oHzjuqSPt0M1fh5r-5BTIIUfsU,23287
pip/_vendor/pygments/formatters/irc.py,sha256=dp1Z0l_ObJ5NFh9MhqLGg5ptG5hgJqedT2Vkutt9v0M,4981
pip/_vendor/pygments/formatters/latex.py,sha256=XMmhOCqUKDBQtG5mGJNAFYxApqaC5puo5cMmPfK3944,19306
pip/_vendor/pygments/formatters/other.py,sha256=56PMJOliin-rAUdnRM0i1wsV1GdUPd_dvQq0_UPfF9c,5034
pip/_vendor/pygments/formatters/pangomarkup.py,sha256=y16U00aVYYEFpeCfGXlYBSMacG425CbfoG8oKbKegIg,2218
pip/_vendor/pygments/formatters/rtf.py,sha256=ZT90dmcKyJboIB0mArhL7IhE467GXRN0G7QAUgG03To,11957
pip/_vendor/pygments/formatters/svg.py,sha256=KKsiophPupHuxm0So-MsbQEWOT54IAiSF7hZPmxtKXE,7174
pip/_vendor/pygments/formatters/terminal.py,sha256=AojNG4MlKq2L6IsC_VnXHu4AbHCBn9Otog6u45XvxeI,4674
pip/_vendor/pygments/formatters/terminal256.py,sha256=kGkNUVo3FpwjytIDS0if79EuUoroAprcWt3igrcIqT0,11753
pip/_vendor/pygments/lexer.py,sha256=TYHDt___gNW4axTl2zvPZff-VQi8fPaIh5OKRcVSjUM,35349
pip/_vendor/pygments/lexers/__init__.py,sha256=pIlxyQJuu_syh9lE080cq8ceVbEVcKp0osAFU5fawJU,12115
pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc,,
pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc,,
pip/_vendor/pygments/lexers/_mapping.py,sha256=61-h3zr103m01OS5BUq_AfUiL9YI06Ves9ipQ7k4vr4,76097
pip/_vendor/pygments/lexers/python.py,sha256=2J_YJrPTr_A6fJY_qKiKv0GpgPwHMrlMSeo59qN3fe4,53687
pip/_vendor/pygments/modeline.py,sha256=gtRYZBS-CKOCDXHhGZqApboHBaZwGH8gznN3O6nuxj4,1005
pip/_vendor/pygments/plugin.py,sha256=ioeJ3QeoJ-UQhZpY9JL7vbxsTVuwwM7BCu-Jb8nN0AU,1891
pip/_vendor/pygments/regexopt.py,sha256=Hky4EB13rIXEHQUNkwmCrYqtIlnXDehNR3MztafZ43w,3072
pip/_vendor/pygments/scanner.py,sha256=NDy3ofK_fHRFK4hIDvxpamG871aewqcsIb6sgTi7Fhk,3092
pip/_vendor/pygments/sphinxext.py,sha256=iOptJBcqOGPwMEJ2p70PvwpZPIGdvdZ8dxvq6kzxDgA,7981
pip/_vendor/pygments/style.py,sha256=rSCZWFpg1_DwFMXDU0nEVmAcBHpuQGf9RxvOPPQvKLQ,6420
pip/_vendor/pygments/styles/__init__.py,sha256=qUk6_1z5KmT8EdJFZYgESmG6P_HJF_2vVrDD7HSCGYY,2042
pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-312.pyc,,
pip/_vendor/pygments/styles/_mapping.py,sha256=6lovFUE29tz6EsV3XYY4hgozJ7q1JL7cfO3UOlgnS8w,3312
pip/_vendor/pygments/token.py,sha256=qZwT7LSPy5YBY3JgDjut642CCy7JdQzAfmqD9NmT5j0,6226
pip/_vendor/pygments/unistring.py,sha256=p5c1i-HhoIhWemy9CUsaN9o39oomYHNxXll0Xfw6tEA,63208
pip/_vendor/pygments/util.py,sha256=2tj2nS1X9_OpcuSjf8dOET2bDVZhs8cEKd_uT6-Fgg8,10031
pip/_vendor/pyproject_hooks/__init__.py,sha256=kCehmy0UaBa9oVMD7ZIZrnswfnP3LXZ5lvnNJAL5JBM,491
pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-312.pyc,,
pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc,,
pip/_vendor/pyproject_hooks/_compat.py,sha256=by6evrYnqkisiM-MQcvOKs5bgDMzlOSgZqRHNqf04zE,138
pip/_vendor/pyproject_hooks/_impl.py,sha256=61GJxzQip0IInhuO69ZI5GbNQ82XEDUB_1Gg5_KtUoc,11920
pip/_vendor/pyproject_hooks/_in_process/__init__.py,sha256=9gQATptbFkelkIy0OfWFEACzqxXJMQDWCH9rBOAZVwQ,546
pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc,,
pip/_vendor/pyproject_hooks/_in_process/_in_process.py,sha256=m2b34c917IW5o-Q_6TYIHlsK9lSUlNiyrITTUH_zwew,10927
pip/_vendor/requests/__init__.py,sha256=HlB_HzhrzGtfD_aaYUwUh1zWXLZ75_YCLyit75d0Vz8,5057
pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/api.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/auth.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/certs.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/compat.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/help.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/models.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/packages.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/structures.cpython-312.pyc,,
pip/_vendor/requests/__pycache__/utils.cpython-312.pyc,,
pip/_vendor/requests/__version__.py,sha256=FVfglgZmNQnmYPXpOohDU58F5EUb_-VnSTaAesS187g,435
pip/_vendor/requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495
pip/_vendor/requests/adapters.py,sha256=J7VeVxKBvawbtlX2DERVo05J9BXTcWYLMHNd1Baa-bk,27607
pip/_vendor/requests/api.py,sha256=_Zb9Oa7tzVIizTKwFrPjDEY9ejtm_OnSRERnADxGsQs,6449
pip/_vendor/requests/auth.py,sha256=kF75tqnLctZ9Mf_hm9TZIj4cQWnN5uxRz8oWsx5wmR0,10186
pip/_vendor/requests/certs.py,sha256=PVPooB0jP5hkZEULSCwC074532UFbR2Ptgu0I5zwmCs,575
pip/_vendor/requests/compat.py,sha256=Mo9f9xZpefod8Zm-n9_StJcVTmwSukXR2p3IQyyVXvU,1485
pip/_vendor/requests/cookies.py,sha256=bNi-iqEj4NPZ00-ob-rHvzkvObzN3lEpgw3g6paS3Xw,18590
pip/_vendor/requests/exceptions.py,sha256=D1wqzYWne1mS2rU43tP9CeN1G7QAy7eqL9o1god6Ejw,4272
pip/_vendor/requests/help.py,sha256=hRKaf9u0G7fdwrqMHtF3oG16RKktRf6KiwtSq2Fo1_0,3813
pip/_vendor/requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733
pip/_vendor/requests/models.py,sha256=x4K4CmH-lC0l2Kb-iPfMN4dRXxHEcbOaEWBL_i09AwI,35483
pip/_vendor/requests/packages.py,sha256=_ZQDCJTJ8SP3kVWunSqBsRZNPzj2c1WFVqbdr08pz3U,1057
pip/_vendor/requests/sessions.py,sha256=ykTI8UWGSltOfH07HKollH7kTBGw4WhiBVaQGmckTw4,30495
pip/_vendor/requests/status_codes.py,sha256=iJUAeA25baTdw-6PfD0eF4qhpINDJRJI-yaMqxs4LEI,4322
pip/_vendor/requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912
pip/_vendor/requests/utils.py,sha256=L79vnFbzJ3SFLKtJwpoWe41Tozi3RlZv94pY1TFIyow,33631
pip/_vendor/resolvelib/__init__.py,sha256=h509TdEcpb5-44JonaU3ex2TM15GVBLjM9CNCPwnTTs,537
pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc,,
pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc,,
pip/_vendor/resolvelib/__pycache__/resolvers.cpython-312.pyc,,
pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc,,
pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-312.pyc,,
pip/_vendor/resolvelib/compat/collections_abc.py,sha256=uy8xUZ-NDEw916tugUXm8HgwCGiMO0f-RcdnpkfXfOs,156
pip/_vendor/resolvelib/providers.py,sha256=fuuvVrCetu5gsxPB43ERyjfO8aReS3rFQHpDgiItbs4,5871
pip/_vendor/resolvelib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/resolvelib/reporters.py,sha256=TSbRmWzTc26w0ggsV1bxVpeWDB8QNIre6twYl7GIZBE,1601
pip/_vendor/resolvelib/resolvers.py,sha256=G8rsLZSq64g5VmIq-lB7UcIJ1gjAxIQJmTF4REZleQ0,20511
pip/_vendor/resolvelib/structs.py,sha256=0_1_XO8z_CLhegP3Vpf9VJ3zJcfLm0NOHRM-i0Ykz3o,4963
pip/_vendor/rich/__init__.py,sha256=dRxjIL-SbFVY0q3IjSMrfgBTHrm1LZDgLOygVBwiYZc,6090
pip/_vendor/rich/__main__.py,sha256=eO7Cq8JnrgG8zVoeImiAs92q3hXNMIfp0w5lMsO7Q2Y,8477
pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_extension.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_fileno.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_inspect.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_log_render.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_loop.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_null_file.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_palettes.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_pick.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_ratio.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_spinners.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_stack.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_timer.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_win32_console.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_windows.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_windows_renderer.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/_wrap.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/abc.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/align.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/ansi.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/bar.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/box.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/cells.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/color.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/color_triplet.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/columns.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/console.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/constrain.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/containers.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/control.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/default_styles.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/diagnose.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/emoji.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/errors.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/file_proxy.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/filesize.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/highlighter.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/json.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/jupyter.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/layout.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/live.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/live_render.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/logging.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/markup.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/measure.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/padding.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/pager.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/palette.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/panel.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/pretty.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/progress.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/progress_bar.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/prompt.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/protocol.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/region.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/repr.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/rule.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/scope.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/screen.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/segment.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/spinner.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/status.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/style.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/styled.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/syntax.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/table.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/terminal_theme.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/text.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/theme.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/themes.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc,,
pip/_vendor/rich/__pycache__/tree.cpython-312.pyc,,
pip/_vendor/rich/_cell_widths.py,sha256=fbmeyetEdHjzE_Vx2l1uK7tnPOhMs2X1lJfO3vsKDpA,10209
pip/_vendor/rich/_emoji_codes.py,sha256=hu1VL9nbVdppJrVoijVshRlcRRe_v3dju3Mmd2sKZdY,140235
pip/_vendor/rich/_emoji_replace.py,sha256=n-kcetsEUx2ZUmhQrfeMNc-teeGhpuSQ5F8VPBsyvDo,1064
pip/_vendor/rich/_export_format.py,sha256=RI08pSrm5tBSzPMvnbTqbD9WIalaOoN5d4M1RTmLq1Y,2128
pip/_vendor/rich/_extension.py,sha256=Xt47QacCKwYruzjDi-gOBq724JReDj9Cm9xUi5fr-34,265
pip/_vendor/rich/_fileno.py,sha256=HWZxP5C2ajMbHryvAQZseflVfQoGzsKOHzKGsLD8ynQ,799
pip/_vendor/rich/_inspect.py,sha256=oZJGw31e64dwXSCmrDnvZbwVb1ZKhWfU8wI3VWohjJk,9695
pip/_vendor/rich/_log_render.py,sha256=1ByI0PA1ZpxZY3CGJOK54hjlq4X-Bz_boIjIqCd8Kns,3225
pip/_vendor/rich/_loop.py,sha256=hV_6CLdoPm0va22Wpw4zKqM0RYsz3TZxXj0PoS-9eDQ,1236
pip/_vendor/rich/_null_file.py,sha256=tGSXk_v-IZmbj1GAzHit8A3kYIQMiCpVsCFfsC-_KJ4,1387
pip/_vendor/rich/_palettes.py,sha256=cdev1JQKZ0JvlguV9ipHgznTdnvlIzUFDBb0It2PzjI,7063
pip/_vendor/rich/_pick.py,sha256=evDt8QN4lF5CiwrUIXlOJCntitBCOsI3ZLPEIAVRLJU,423
pip/_vendor/rich/_ratio.py,sha256=Zt58apszI6hAAcXPpgdWKpu3c31UBWebOeR4mbyptvU,5471
pip/_vendor/rich/_spinners.py,sha256=U2r1_g_1zSjsjiUdAESc2iAMc3i4ri_S8PYP6kQ5z1I,19919
pip/_vendor/rich/_stack.py,sha256=-C8OK7rxn3sIUdVwxZBBpeHhIzX0eI-VM3MemYfaXm0,351
pip/_vendor/rich/_timer.py,sha256=zelxbT6oPFZnNrwWPpc1ktUeAT-Vc4fuFcRZLQGLtMI,417
pip/_vendor/rich/_win32_console.py,sha256=P0vxI2fcndym1UU1S37XAzQzQnkyY7YqAKmxm24_gug,22820
pip/_vendor/rich/_windows.py,sha256=aBwaD_S56SbgopIvayVmpk0Y28uwY2C5Bab1wl3Bp-I,1925
pip/_vendor/rich/_windows_renderer.py,sha256=t74ZL3xuDCP3nmTp9pH1L5LiI2cakJuQRQleHCJerlk,2783
pip/_vendor/rich/_wrap.py,sha256=FlSsom5EX0LVkA3KWy34yHnCfLtqX-ZIepXKh-70rpc,3404
pip/_vendor/rich/abc.py,sha256=ON-E-ZqSSheZ88VrKX2M3PXpFbGEUUZPMa_Af0l-4f0,890
pip/_vendor/rich/align.py,sha256=sCUkisXkQfoq-IQPyBELfJ8l7LihZJX3HbH8K7Cie-M,10368
pip/_vendor/rich/ansi.py,sha256=iD6532QYqnBm6hADulKjrV8l8kFJ-9fEVooHJHH3hMg,6906
pip/_vendor/rich/bar.py,sha256=ldbVHOzKJOnflVNuv1xS7g6dLX2E3wMnXkdPbpzJTcs,3263
pip/_vendor/rich/box.py,sha256=nr5fYIUghB_iUCEq6y0Z3LlCT8gFPDrzN9u2kn7tJl4,10831
pip/_vendor/rich/cells.py,sha256=aMmGK4BjXhgE6_JF1ZEGmW3O7mKkE8g84vUnj4Et4To,4780
pip/_vendor/rich/color.py,sha256=bCRATVdRe5IClJ6Hl62de2PKQ_U4i2MZ4ugjUEg7Tao,18223
pip/_vendor/rich/color_triplet.py,sha256=3lhQkdJbvWPoLDO-AnYImAWmJvV5dlgYNCVZ97ORaN4,1054
pip/_vendor/rich/columns.py,sha256=HUX0KcMm9dsKNi11fTbiM_h2iDtl8ySCaVcxlalEzq8,7131
pip/_vendor/rich/console.py,sha256=deFZIubq2M9A2MCsKFAsFQlWDvcOMsGuUA07QkOaHIw,99173
pip/_vendor/rich/constrain.py,sha256=1VIPuC8AgtKWrcncQrjBdYqA3JVWysu6jZo1rrh7c7Q,1288
pip/_vendor/rich/containers.py,sha256=c_56TxcedGYqDepHBMTuZdUIijitAQgnox-Qde0Z1qo,5502
pip/_vendor/rich/control.py,sha256=DSkHTUQLorfSERAKE_oTAEUFefZnZp4bQb4q8rHbKws,6630
pip/_vendor/rich/default_styles.py,sha256=-Fe318kMVI_IwciK5POpThcO0-9DYJ67TZAN6DlmlmM,8082
pip/_vendor/rich/diagnose.py,sha256=an6uouwhKPAlvQhYpNNpGq9EJysfMIOvvCbO3oSoR24,972
pip/_vendor/rich/emoji.py,sha256=omTF9asaAnsM4yLY94eR_9dgRRSm1lHUszX20D1yYCQ,2501
pip/_vendor/rich/errors.py,sha256=5pP3Kc5d4QJ_c0KFsxrfyhjiPVe7J1zOqSFbFAzcV-Y,642
pip/_vendor/rich/file_proxy.py,sha256=Tl9THMDZ-Pk5Wm8sI1gGg_U5DhusmxD-FZ0fUbcU0W0,1683
pip/_vendor/rich/filesize.py,sha256=9fTLAPCAwHmBXdRv7KZU194jSgNrRb6Wx7RIoBgqeKY,2508
pip/_vendor/rich/highlighter.py,sha256=6ZAjUcNhBRajBCo9umFUclyi2xL0-55JL7S0vYGUJu4,9585
pip/_vendor/rich/json.py,sha256=vVEoKdawoJRjAFayPwXkMBPLy7RSTs-f44wSQDR2nJ0,5031
pip/_vendor/rich/jupyter.py,sha256=QyoKoE_8IdCbrtiSHp9TsTSNyTHY0FO5whE7jOTd9UE,3252
pip/_vendor/rich/layout.py,sha256=ajkSFAtEVv9EFTcFs-w4uZfft7nEXhNzL7ZVdgrT5rI,14004
pip/_vendor/rich/live.py,sha256=vUcnJV2LMSK3sQNaILbm0-_B8BpAeiHfcQMAMLfpRe0,14271
pip/_vendor/rich/live_render.py,sha256=zJtB471jGziBtEwxc54x12wEQtH4BuQr1SA8v9kU82w,3666
pip/_vendor/rich/logging.py,sha256=uB-cB-3Q4bmXDLLpbOWkmFviw-Fde39zyMV6tKJ2WHQ,11903
pip/_vendor/rich/markup.py,sha256=3euGKP5s41NCQwaSjTnJxus5iZMHjxpIM0W6fCxra38,8451
pip/_vendor/rich/measure.py,sha256=HmrIJX8sWRTHbgh8MxEay_83VkqNW_70s8aKP5ZcYI8,5305
pip/_vendor/rich/padding.py,sha256=kTFGsdGe0os7tXLnHKpwTI90CXEvrceeZGCshmJy5zw,4970
pip/_vendor/rich/pager.py,sha256=SO_ETBFKbg3n_AgOzXm41Sv36YxXAyI3_R-KOY2_uSc,828
pip/_vendor/rich/palette.py,sha256=lInvR1ODDT2f3UZMfL1grq7dY_pDdKHw4bdUgOGaM4Y,3396
pip/_vendor/rich/panel.py,sha256=2Fd1V7e1kHxlPFIusoHY5T7-Cs0RpkrihgVG9ZVqJ4g,10705
pip/_vendor/rich/pretty.py,sha256=5oIHP_CGWnHEnD0zMdW5qfGC5kHqIKn7zH_eC4crULE,35848
pip/_vendor/rich/progress.py,sha256=P02xi7T2Ua3qq17o83bkshe4c0v_45cg8VyTj6US6Vg,59715
pip/_vendor/rich/progress_bar.py,sha256=L4jw8E6Qb_x-jhOrLVhkuMaPmiAhFIl8jHQbWFrKuR8,8164
pip/_vendor/rich/prompt.py,sha256=wdOn2X8XTJKnLnlw6PoMY7xG4iUPp3ezt4O5gqvpV-E,11304
pip/_vendor/rich/protocol.py,sha256=5hHHDDNHckdk8iWH5zEbi-zuIVSF5hbU2jIo47R7lTE,1391
pip/_vendor/rich/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/rich/region.py,sha256=rNT9xZrVZTYIXZC0NYn41CJQwYNbR-KecPOxTgQvB8Y,166
pip/_vendor/rich/repr.py,sha256=5MZJZmONgC6kud-QW-_m1okXwL2aR6u6y-pUcUCJz28,4431
pip/_vendor/rich/rule.py,sha256=0fNaS_aERa3UMRc3T5WMpN_sumtDxfaor2y3of1ftBk,4602
pip/_vendor/rich/scope.py,sha256=TMUU8qo17thyqQCPqjDLYpg_UU1k5qVd-WwiJvnJVas,2843
pip/_vendor/rich/screen.py,sha256=YoeReESUhx74grqb0mSSb9lghhysWmFHYhsbMVQjXO8,1591
pip/_vendor/rich/segment.py,sha256=hU1ueeXqI6YeFa08K9DAjlF2QLxcJY9pwZx7RsXavlk,24246
pip/_vendor/rich/spinner.py,sha256=15koCmF0DQeD8-k28Lpt6X_zJQUlzEhgo_6A6uy47lc,4339
pip/_vendor/rich/status.py,sha256=kkPph3YeAZBo-X-4wPp8gTqZyU466NLwZBA4PZTTewo,4424
pip/_vendor/rich/style.py,sha256=3hiocH_4N8vwRm3-8yFWzM7tSwjjEven69XqWasSQwM,27073
pip/_vendor/rich/styled.py,sha256=eZNnzGrI4ki_54pgY3Oj0T-x3lxdXTYh4_ryDB24wBU,1258
pip/_vendor/rich/syntax.py,sha256=TnZDuOD4DeHFbkaVEAji1gf8qgAlMU9Boe_GksMGCkk,35475
pip/_vendor/rich/table.py,sha256=nGEvAZHF4dy1vT9h9Gj9O5qhSQO3ODAxJv0RY1vnIB8,39680
pip/_vendor/rich/terminal_theme.py,sha256=1j5-ufJfnvlAo5Qsi_ACZiXDmwMXzqgmFByObT9-yJY,3370
pip/_vendor/rich/text.py,sha256=5rQ3zvNrg5UZKNLecbh7fiw9v3HeFulNVtRY_CBDjjE,47312
pip/_vendor/rich/theme.py,sha256=belFJogzA0W0HysQabKaHOc3RWH2ko3fQAJhoN-AFdo,3777
pip/_vendor/rich/themes.py,sha256=0xgTLozfabebYtcJtDdC5QkX5IVUEaviqDUJJh4YVFk,102
pip/_vendor/rich/traceback.py,sha256=CUpxYLjQWIb6vQQ6O72X0hvDV6caryGqU6UweHgOyCY,29601
pip/_vendor/rich/tree.py,sha256=meAOUU6sYnoBEOX2ILrPLY9k5bWrWNQKkaiEFvHinXM,9167
pip/_vendor/tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396
pip/_vendor/tomli/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/tomli/__pycache__/_parser.cpython-312.pyc,,
pip/_vendor/tomli/__pycache__/_re.cpython-312.pyc,,
pip/_vendor/tomli/__pycache__/_types.cpython-312.pyc,,
pip/_vendor/tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633
pip/_vendor/tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943
pip/_vendor/tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254
pip/_vendor/tomli/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26
pip/_vendor/truststore/__init__.py,sha256=M-PhuLMIF7gxKXk7tpo2MD7dk6nqG1ae8GXWdNXbMdQ,403
pip/_vendor/truststore/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/truststore/__pycache__/_api.cpython-312.pyc,,
pip/_vendor/truststore/__pycache__/_macos.cpython-312.pyc,,
pip/_vendor/truststore/__pycache__/_openssl.cpython-312.pyc,,
pip/_vendor/truststore/__pycache__/_ssl_constants.cpython-312.pyc,,
pip/_vendor/truststore/__pycache__/_windows.cpython-312.pyc,,
pip/_vendor/truststore/_api.py,sha256=B9JIHipzBIS8pMP_J50-o1DHVZsvKZQUXTB0HQQ_UPg,10461
pip/_vendor/truststore/_macos.py,sha256=VJ24avz5aEGYAs_kWvnGjMJtuIP4xJcYa459UQOQC3M,17608
pip/_vendor/truststore/_openssl.py,sha256=LLUZ7ZGaio-i5dpKKjKCSeSufmn6T8pi9lDcFnvSyq0,2324
pip/_vendor/truststore/_ssl_constants.py,sha256=NUD4fVKdSD02ri7-db0tnO0VqLP9aHuzmStcW7tAl08,1130
pip/_vendor/truststore/_windows.py,sha256=eldNViHNHeY5r3fiBoz_JFGD37atXB9S5yaRoPKEGAA,17891
pip/_vendor/truststore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/typing_extensions.py,sha256=78hFl0HpDY-ylHUVCnWdU5nTHxUP2-S-3wEZk6CQmLk,134499
pip/_vendor/urllib3/__init__.py,sha256=iXLcYiJySn0GNbWOOZDDApgBL1JgP44EZ8i1760S8Mc,3333
pip/_vendor/urllib3/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/urllib3/__pycache__/_collections.cpython-312.pyc,,
pip/_vendor/urllib3/__pycache__/_version.cpython-312.pyc,,
pip/_vendor/urllib3/__pycache__/connection.cpython-312.pyc,,
pip/_vendor/urllib3/__pycache__/connectionpool.cpython-312.pyc,,
pip/_vendor/urllib3/__pycache__/exceptions.cpython-312.pyc,,
pip/_vendor/urllib3/__pycache__/fields.cpython-312.pyc,,
pip/_vendor/urllib3/__pycache__/filepost.cpython-312.pyc,,
pip/_vendor/urllib3/__pycache__/poolmanager.cpython-312.pyc,,
pip/_vendor/urllib3/__pycache__/request.cpython-312.pyc,,
pip/_vendor/urllib3/__pycache__/response.cpython-312.pyc,,
pip/_vendor/urllib3/_collections.py,sha256=pyASJJhW7wdOpqJj9QJA8FyGRfr8E8uUUhqUvhF0728,11372
pip/_vendor/urllib3/_version.py,sha256=cuJvnSrWxXGYgQ3-ZRoPMw8-qaN5tpw71jnH1t16dLA,64
pip/_vendor/urllib3/connection.py,sha256=92k9td_y4PEiTIjNufCUa1NzMB3J3w0LEdyokYgXnW8,20300
pip/_vendor/urllib3/connectionpool.py,sha256=Be6q65SR9laoikg-h_jmc_p8OWtEmwgq_Om_Xtig-2M,40285
pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-312.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-312.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-312.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-312.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-312.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-312.pyc,,
pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-312.pyc,,
pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-312.pyc,,
pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632
pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922
pip/_vendor/urllib3/contrib/appengine.py,sha256=VR68eAVE137lxTgjBDwCna5UiBZTOKa01Aj_-5BaCz4,11036
pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=NlfkW7WMdW8ziqudopjHoW299og1BTWi0IeIibquFwk,4528
pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=hDJh4MhyY_p-oKlFcYcQaVQRDv6GMmBGuW9yjxyeejM,17081
pip/_vendor/urllib3/contrib/securetransport.py,sha256=Fef1IIUUFHqpevzXiDPbIGkDKchY2FVKeVeLGR1Qq3g,34446
pip/_vendor/urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097
pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217
pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
pip/_vendor/urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/urllib3/packages/__pycache__/six.cpython-312.pyc,,
pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-312.pyc,,
pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-312.pyc,,
pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
pip/_vendor/urllib3/packages/backports/weakref_finalize.py,sha256=tRCal5OAhNSRyb0DhHp-38AtIlCsRP8BxF3NX-6rqIA,5343
pip/_vendor/urllib3/packages/six.py,sha256=b9LM0wBXv7E7SrbCjAm4wwN-hrH-iNxv18LgWNMMKPo,34665
pip/_vendor/urllib3/poolmanager.py,sha256=aWyhXRtNO4JUnCSVVqKTKQd8EXTvUm1VN9pgs2bcONo,19990
pip/_vendor/urllib3/request.py,sha256=YTWFNr7QIwh7E1W9dde9LM77v2VWTJ5V78XuTTw7D1A,6691
pip/_vendor/urllib3/response.py,sha256=fmDJAFkG71uFTn-sVSTh2Iw0WmcXQYqkbRjihvwBjU8,30641
pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
pip/_vendor/urllib3/util/__pycache__/__init__.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/connection.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/proxy.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/queue.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/request.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/response.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/retry.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/timeout.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/url.cpython-312.pyc,,
pip/_vendor/urllib3/util/__pycache__/wait.cpython-312.pyc,,
pip/_vendor/urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901
pip/_vendor/urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605
pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
pip/_vendor/urllib3/util/request.py,sha256=C0OUt2tcU6LRiQJ7YYNP9GvPrSvl7ziIBekQ-5nlBZk,3997
pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
pip/_vendor/urllib3/util/retry.py,sha256=Z6WEf518eTOXP5jr5QSQ9gqJI0DVYt3Xs3EKnYaTmus,22013
pip/_vendor/urllib3/util/ssl_.py,sha256=X4-AqW91aYPhPx6-xbf66yHFQKbqqfC_5Zt4WkLX1Hc,17177
pip/_vendor/urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758
pip/_vendor/urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895
pip/_vendor/urllib3/util/timeout.py,sha256=cwq4dMk87mJHSBktK1miYJ-85G-3T3RmT20v7SFCpno,10168
pip/_vendor/urllib3/util/url.py,sha256=lCAE7M5myA8EDdW0sJuyyZhVB9K_j38ljWhHAnFaWoE,14296
pip/_vendor/urllib3/util/wait.py,sha256=fOX0_faozG2P7iVojQoE1mbydweNyTcm-hXEfFrTtLI,5403
pip/_vendor/vendor.txt,sha256=PxNaxxkkpBaw5zOTsDpHEY-zEaHjgkDgyrSxOuxg8nw,330
pip/py.typed,sha256=EBVvvPRTn_eIpz5e5QztSCdrMX7Qwd7VP93RSoIlZ2I,286

View File

@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: setuptools (71.1.0)
Root-Is-Purelib: true
Tag: py3-none-any

View File

@ -1,3 +0,0 @@
[console_scripts]
pip = pip._internal.cli.main:main
pip3 = pip._internal.cli.main:main

View File

@ -1,9 +1,9 @@
from typing import List, Optional
from __future__ import annotations
__version__ = "24.2"
__version__ = "25.3"
def main(args: Optional[List[str]] = None) -> int:
def main(args: list[str] | None = None) -> int:
"""This is an internal API only meant for use by pip's own console scripts.
For additional details, see https://github.com/pypa/pip/issues/7498.

View File

@ -9,7 +9,7 @@ an import statement.
import sys
# Copied from pyproject.toml
PYTHON_REQUIRES = (3, 8)
PYTHON_REQUIRES = (3, 9)
def version_str(version): # type: ignore

View File

@ -1,4 +1,4 @@
from typing import List, Optional
from __future__ import annotations
from pip._internal.utils import _log
@ -7,7 +7,7 @@ from pip._internal.utils import _log
_log.init_logging()
def main(args: Optional[List[str]] = None) -> int:
def main(args: list[str] | None = None) -> int:
"""This is preserved for old console scripts that may still be referencing
it.

View File

@ -1,5 +1,6 @@
"""Build Environment used for isolation during sdist building
"""
"""Build Environment used for isolation during sdist building"""
from __future__ import annotations
import logging
import os
@ -8,16 +9,17 @@ import site
import sys
import textwrap
from collections import OrderedDict
from collections.abc import Iterable
from types import TracebackType
from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
from typing import TYPE_CHECKING, Protocol, TypedDict
from pip._vendor.certifi import where
from pip._vendor.packaging.version import Version
from pip import __file__ as pip_location
from pip._internal.cli.spinners import open_spinner
from pip._internal.locations import get_platlib, get_purelib, get_scheme
from pip._internal.metadata import get_default_environment, get_environment
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import VERBOSE
from pip._internal.utils.packaging import get_requirement
from pip._internal.utils.subprocess import call_subprocess
@ -25,11 +27,16 @@ from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
if TYPE_CHECKING:
from pip._internal.index.package_finder import PackageFinder
from pip._internal.req.req_install import InstallRequirement
class ExtraEnviron(TypedDict, total=False):
extra_environ: dict[str, str]
logger = logging.getLogger(__name__)
def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
def _dedup(a: str, b: str) -> tuple[str] | tuple[str, str]:
return (a, b) if a != b else (a,)
@ -58,7 +65,7 @@ def get_runnable_pip() -> str:
return os.fsdecode(source / "__pip-runner__.py")
def _get_system_sitepackages() -> Set[str]:
def _get_system_sitepackages() -> set[str]:
"""Get system site packages
Usually from site.getsitepackages,
@ -78,10 +85,171 @@ def _get_system_sitepackages() -> Set[str]:
return {os.path.normcase(path) for path in system_sites}
class BuildEnvironmentInstaller(Protocol):
"""
Interface for installing build dependencies into an isolated build
environment.
"""
def install(
self,
requirements: Iterable[str],
prefix: _Prefix,
*,
kind: str,
for_req: InstallRequirement | None,
) -> None: ...
class SubprocessBuildEnvironmentInstaller:
"""
Install build dependencies by calling pip in a subprocess.
"""
def __init__(
self,
finder: PackageFinder,
build_constraints: list[str] | None = None,
build_constraint_feature_enabled: bool = False,
) -> None:
self.finder = finder
self._build_constraints = build_constraints or []
self._build_constraint_feature_enabled = build_constraint_feature_enabled
def _deprecation_constraint_check(self) -> None:
"""
Check for deprecation warning: PIP_CONSTRAINT affecting build environments.
This warns when build-constraint feature is NOT enabled and PIP_CONSTRAINT
is not empty.
"""
if self._build_constraint_feature_enabled or self._build_constraints:
return
pip_constraint = os.environ.get("PIP_CONSTRAINT")
if not pip_constraint or not pip_constraint.strip():
return
deprecated(
reason=(
"Setting PIP_CONSTRAINT will not affect "
"build constraints in the future,"
),
replacement=(
"to specify build constraints using --build-constraint or "
"PIP_BUILD_CONSTRAINT. To disable this warning without "
"any build constraints set --use-feature=build-constraint or "
'PIP_USE_FEATURE="build-constraint"'
),
gone_in="26.2",
issue=None,
)
def install(
self,
requirements: Iterable[str],
prefix: _Prefix,
*,
kind: str,
for_req: InstallRequirement | None,
) -> None:
self._deprecation_constraint_check()
finder = self.finder
args: list[str] = [
sys.executable,
get_runnable_pip(),
"install",
"--ignore-installed",
"--no-user",
"--prefix",
prefix.path,
"--no-warn-script-location",
"--disable-pip-version-check",
# As the build environment is ephemeral, it's wasteful to
# pre-compile everything, especially as not every Python
# module will be used/compiled in most cases.
"--no-compile",
# The prefix specified two lines above, thus
# target from config file or env var should be ignored
"--target",
"",
]
if logger.getEffectiveLevel() <= logging.DEBUG:
args.append("-vv")
elif logger.getEffectiveLevel() <= VERBOSE:
args.append("-v")
for format_control in ("no_binary", "only_binary"):
formats = getattr(finder.format_control, format_control)
args.extend(
(
"--" + format_control.replace("_", "-"),
",".join(sorted(formats or {":none:"})),
)
)
index_urls = finder.index_urls
if index_urls:
args.extend(["-i", index_urls[0]])
for extra_index in index_urls[1:]:
args.extend(["--extra-index-url", extra_index])
else:
args.append("--no-index")
for link in finder.find_links:
args.extend(["--find-links", link])
if finder.proxy:
args.extend(["--proxy", finder.proxy])
for host in finder.trusted_hosts:
args.extend(["--trusted-host", host])
if finder.custom_cert:
args.extend(["--cert", finder.custom_cert])
if finder.client_cert:
args.extend(["--client-cert", finder.client_cert])
if finder.allow_all_prereleases:
args.append("--pre")
if finder.prefer_binary:
args.append("--prefer-binary")
# Handle build constraints
if self._build_constraint_feature_enabled:
args.extend(["--use-feature", "build-constraint"])
if self._build_constraints:
# Build constraints must be passed as both constraints
# and build constraints, so that nested builds receive
# build constraints
for constraint_file in self._build_constraints:
args.extend(["--constraint", constraint_file])
args.extend(["--build-constraint", constraint_file])
extra_environ: ExtraEnviron = {}
if self._build_constraint_feature_enabled and not self._build_constraints:
# If there are no build constraints but the build constraints
# feature is enabled then we must ignore regular constraints
# in the isolated build environment
extra_environ = {"extra_environ": {"_PIP_IN_BUILD_IGNORE_CONSTRAINTS": "1"}}
args.append("--")
args.extend(requirements)
identify_requirement = (
f" for {for_req.name}" if for_req and for_req.name else ""
)
with open_spinner(f"Installing {kind}") as spinner:
call_subprocess(
args,
command_desc=f"installing {kind}{identify_requirement}",
spinner=spinner,
**extra_environ,
)
class BuildEnvironment:
"""Creates and manages an isolated environment to install build deps"""
def __init__(self) -> None:
def __init__(self, installer: BuildEnvironmentInstaller) -> None:
self.installer = installer
temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
self._prefixes = OrderedDict(
@ -89,8 +257,8 @@ class BuildEnvironment:
for name in ("normal", "overlay")
)
self._bin_dirs: List[str] = []
self._lib_dirs: List[str] = []
self._bin_dirs: list[str] = []
self._lib_dirs: list[str] = []
for prefix in reversed(list(self._prefixes.values())):
self._bin_dirs.append(prefix.bin_dir)
self._lib_dirs.extend(prefix.lib_dirs)
@ -158,9 +326,9 @@ class BuildEnvironment:
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
for varname, old_value in self._save_env.items():
if old_value is None:
@ -170,7 +338,7 @@ class BuildEnvironment:
def check_requirements(
self, reqs: Iterable[str]
) -> Tuple[Set[Tuple[str, str]], Set[str]]:
) -> tuple[set[tuple[str, str]], set[str]]:
"""Return 2 sets:
- conflicting requirements: set of (installed, wanted) reqs tuples
- missing requirements: set of reqs
@ -204,84 +372,18 @@ class BuildEnvironment:
def install_requirements(
self,
finder: "PackageFinder",
requirements: Iterable[str],
prefix_as_string: str,
*,
kind: str,
for_req: InstallRequirement | None = None,
) -> None:
prefix = self._prefixes[prefix_as_string]
assert not prefix.setup
prefix.setup = True
if not requirements:
return
self._install_requirements(
get_runnable_pip(),
finder,
requirements,
prefix,
kind=kind,
)
@staticmethod
def _install_requirements(
pip_runnable: str,
finder: "PackageFinder",
requirements: Iterable[str],
prefix: _Prefix,
*,
kind: str,
) -> None:
args: List[str] = [
sys.executable,
pip_runnable,
"install",
"--ignore-installed",
"--no-user",
"--prefix",
prefix.path,
"--no-warn-script-location",
"--disable-pip-version-check",
]
if logger.getEffectiveLevel() <= logging.DEBUG:
args.append("-vv")
elif logger.getEffectiveLevel() <= VERBOSE:
args.append("-v")
for format_control in ("no_binary", "only_binary"):
formats = getattr(finder.format_control, format_control)
args.extend(
(
"--" + format_control.replace("_", "-"),
",".join(sorted(formats or {":none:"})),
)
)
index_urls = finder.index_urls
if index_urls:
args.extend(["-i", index_urls[0]])
for extra_index in index_urls[1:]:
args.extend(["--extra-index-url", extra_index])
else:
args.append("--no-index")
for link in finder.find_links:
args.extend(["--find-links", link])
for host in finder.trusted_hosts:
args.extend(["--trusted-host", host])
if finder.allow_all_prereleases:
args.append("--pre")
if finder.prefer_binary:
args.append("--prefer-binary")
args.append("--")
args.extend(requirements)
extra_environ = {"_PIP_STANDALONE_CERT": where()}
with open_spinner(f"Installing {kind}") as spinner:
call_subprocess(
args,
command_desc=f"pip subprocess to install {kind}",
spinner=spinner,
extra_environ=extra_environ,
)
self.installer.install(requirements, prefix, kind=kind, for_req=for_req)
class NoOpBuildEnvironment(BuildEnvironment):
@ -295,9 +397,9 @@ class NoOpBuildEnvironment(BuildEnvironment):
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
pass
@ -306,10 +408,10 @@ class NoOpBuildEnvironment(BuildEnvironment):
def install_requirements(
self,
finder: "PackageFinder",
requirements: Iterable[str],
prefix_as_string: str,
*,
kind: str,
for_req: InstallRequirement | None = None,
) -> None:
raise NotImplementedError()

View File

@ -1,12 +1,13 @@
"""Cache Management
"""
"""Cache Management"""
from __future__ import annotations
import hashlib
import json
import logging
import os
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing import Any
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
from pip._vendor.packaging.utils import canonicalize_name
@ -23,7 +24,7 @@ logger = logging.getLogger(__name__)
ORIGIN_JSON_NAME = "origin.json"
def _hash_dict(d: Dict[str, str]) -> str:
def _hash_dict(d: dict[str, str]) -> str:
"""Return a stable sha224 of a dictionary."""
s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
return hashlib.sha224(s.encode("ascii")).hexdigest()
@ -40,7 +41,7 @@ class Cache:
assert not cache_dir or os.path.isabs(cache_dir)
self.cache_dir = cache_dir or None
def _get_cache_path_parts(self, link: Link) -> List[str]:
def _get_cache_path_parts(self, link: Link) -> list[str]:
"""Get parts of part that must be os.path.joined with cache_dir"""
# We want to generate an url to use as our cache key, we don't want to
@ -73,7 +74,7 @@ class Cache:
return parts
def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
def _get_candidates(self, link: Link, canonical_package_name: str) -> list[Any]:
can_not_cache = not self.cache_dir or not canonical_package_name or not link
if can_not_cache:
return []
@ -90,8 +91,8 @@ class Cache:
def get(
self,
link: Link,
package_name: Optional[str],
supported_tags: List[Tag],
package_name: str | None,
supported_tags: list[Tag],
) -> Link:
"""Returns a link to a cached item if it exists, otherwise returns the
passed link.
@ -128,8 +129,8 @@ class SimpleWheelCache(Cache):
def get(
self,
link: Link,
package_name: Optional[str],
supported_tags: List[Tag],
package_name: str | None,
supported_tags: list[Tag],
) -> Link:
candidates = []
@ -142,7 +143,7 @@ class SimpleWheelCache(Cache):
wheel = Wheel(wheel_name)
except InvalidWheelFilename:
continue
if canonicalize_name(wheel.name) != canonical_package_name:
if wheel.name != canonical_package_name:
logger.debug(
"Ignoring cached wheel %s for %s as it "
"does not match the expected distribution name %s.",
@ -189,7 +190,7 @@ class CacheEntry:
):
self.link = link
self.persistent = persistent
self.origin: Optional[DirectUrl] = None
self.origin: DirectUrl | None = None
origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
if origin_direct_url_path.exists():
try:
@ -226,8 +227,8 @@ class WheelCache(Cache):
def get(
self,
link: Link,
package_name: Optional[str],
supported_tags: List[Tag],
package_name: str | None,
supported_tags: list[Tag],
) -> Link:
cache_entry = self.get_cache_entry(link, package_name, supported_tags)
if cache_entry is None:
@ -237,9 +238,9 @@ class WheelCache(Cache):
def get_cache_entry(
self,
link: Link,
package_name: Optional[str],
supported_tags: List[Tag],
) -> Optional[CacheEntry]:
package_name: str | None,
supported_tags: list[Tag],
) -> CacheEntry | None:
"""Returns a CacheEntry with a link to a cached item if it exists or
None. The cache entry indicates if the item was found in the persistent
or ephemeral cache.

View File

@ -1,4 +1,3 @@
"""Subpackage containing all of pip's command line interface related code
"""
"""Subpackage containing all of pip's command line interface related code"""
# This file intentionally does not import submodules

View File

@ -1,11 +1,13 @@
"""Logic that powers autocompletion installed by ``pip completion``.
"""
"""Logic that powers autocompletion installed by ``pip completion``."""
from __future__ import annotations
import optparse
import os
import sys
from collections.abc import Iterable
from itertools import chain
from typing import Any, Iterable, List, Optional
from typing import Any
from pip._internal.cli.main_parser import create_main_parser
from pip._internal.commands import commands_dict, create_command
@ -33,7 +35,7 @@ def autocomplete() -> None:
options = []
# subcommand
subcommand_name: Optional[str] = None
subcommand_name: str | None = None
for word in cwords:
if word in subcommands:
subcommand_name = word
@ -101,6 +103,12 @@ def autocomplete() -> None:
if option[1] and option[0][:2] == "--":
opt_label += "="
print(opt_label)
# Complete sub-commands (unless one is already given).
if not any(name in cwords for name in subcommand.handler_map()):
for handler_name in subcommand.handler_map():
if handler_name.startswith(current):
print(handler_name)
else:
# show main parser options only when necessary
@ -122,8 +130,8 @@ def autocomplete() -> None:
def get_path_completion_type(
cwords: List[str], cword: int, opts: Iterable[Any]
) -> Optional[str]:
cwords: list[str], cword: int, opts: Iterable[Any]
) -> str | None:
"""Get the type of path completion (``file``, ``dir``, ``path`` or None)
:param cwords: same as the environmental variable ``COMP_WORDS``

View File

@ -1,5 +1,7 @@
"""Base Command class, and related routines"""
from __future__ import annotations
import logging
import logging.config
import optparse
@ -7,7 +9,7 @@ import os
import sys
import traceback
from optparse import Values
from typing import List, Optional, Tuple
from typing import Callable
from pip._vendor.rich import reconfigure
from pip._vendor.rich import traceback as rich_traceback
@ -60,7 +62,7 @@ class Command(CommandContextMixIn):
isolated=isolated,
)
self.tempdir_registry: Optional[TempDirRegistry] = None
self.tempdir_registry: TempDirRegistry | None = None
# Commands should add options to this option group
optgroup_name = f"{self.name.capitalize()} Options"
@ -87,10 +89,10 @@ class Command(CommandContextMixIn):
# are present.
assert not hasattr(options, "no_index")
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
raise NotImplementedError
def _run_wrapper(self, level_number: int, options: Values, args: List[str]) -> int:
def _run_wrapper(self, level_number: int, options: Values, args: list[str]) -> int:
def _inner_run() -> int:
try:
return self.run(options, args)
@ -147,18 +149,18 @@ class Command(CommandContextMixIn):
return UNKNOWN_ERROR
def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
def parse_args(self, args: list[str]) -> tuple[Values, list[str]]:
# factored out for testability
return self.parser.parse_args(args)
def main(self, args: List[str]) -> int:
def main(self, args: list[str]) -> int:
try:
with self.main_context():
return self._main(args)
finally:
logging.shutdown()
def _main(self, args: List[str]) -> int:
def _main(self, args: list[str]) -> int:
# We must initialize this before the tempdir manager, otherwise the
# configuration would not be accessible by the time we clean up the
# tempdir manager.
@ -171,6 +173,11 @@ class Command(CommandContextMixIn):
# Set verbosity so that it can be used elsewhere.
self.verbosity = options.verbose - options.quiet
if options.debug_mode:
self.verbosity = 2
if hasattr(options, "progress_bar") and options.progress_bar == "auto":
options.progress_bar = "on" if self.verbosity >= 0 else "off"
reconfigure(no_color=options.no_color)
level_number = setup_logging(
@ -229,3 +236,9 @@ class Command(CommandContextMixIn):
options.cache_dir = None
return self._run_wrapper(level_number, options, args)
def handler_map(self) -> dict[str, Callable[[Values, list[str]], None]]:
"""
map of names to handler actions for commands with sub-actions
"""
return {}

View File

@ -9,15 +9,16 @@ pass on state. To be consistent, all options will follow this design.
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
from __future__ import annotations
import importlib.util
import logging
import os
import pathlib
import textwrap
from functools import partial
from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
from textwrap import dedent
from typing import Any, Callable, Dict, Optional, Tuple
from typing import Any, Callable
from pip._vendor.packaging.utils import canonicalize_name
@ -47,7 +48,7 @@ def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
parser.error(msg)
def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
def make_option_group(group: dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
"""
Return an OptionGroup object
group -- assumed to be dict with 'name' and 'options' keys
@ -99,6 +100,29 @@ def check_dist_restriction(options: Values, check_target: bool = False) -> None:
)
def check_build_constraints(options: Values) -> None:
"""Function for validating build constraints options.
:param options: The OptionParser options.
"""
if hasattr(options, "build_constraints") and options.build_constraints:
if not options.build_isolation:
raise CommandError(
"--build-constraint cannot be used with --no-build-isolation."
)
# Import here to avoid circular imports
from pip._internal.network.session import PipSession
from pip._internal.req.req_file import get_file_content
# Eagerly check build constraints file contents
# is valid so that we don't fail in when trying
# to check constraints in isolated build process
with PipSession() as session:
for constraint_file in options.build_constraints:
get_file_content(constraint_file, session)
def _path_option_check(option: Option, opt: str, value: str) -> str:
return os.path.expanduser(value)
@ -159,8 +183,7 @@ require_virtualenv: Callable[..., Option] = partial(
action="store_true",
default=False,
help=(
"Allow pip to only run in a virtual environment; "
"exit with an error otherwise."
"Allow pip to only run in a virtual environment; exit with an error otherwise."
),
)
@ -226,9 +249,13 @@ progress_bar: Callable[..., Option] = partial(
"--progress-bar",
dest="progress_bar",
type="choice",
choices=["on", "off", "raw"],
default="on",
help="Specify whether the progress bar should be used [on, off, raw] (default: on)",
choices=["auto", "on", "off", "raw"],
default="auto",
help=(
"Specify whether the progress bar should be used. In 'auto'"
" mode, --quiet will suppress all progress bars."
" [auto, on, off, raw] (default: auto)"
),
)
log: Callable[..., Option] = partial(
@ -260,8 +287,8 @@ keyring_provider: Callable[..., Option] = partial(
default="auto",
help=(
"Enable the credential lookup via the keyring library if user input is allowed."
" Specify which mechanism to use [disabled, import, subprocess]."
" (default: disabled)"
" Specify which mechanism to use [auto, disabled, import, subprocess]."
" (default: %default)"
),
)
@ -280,8 +307,17 @@ retries: Callable[..., Option] = partial(
dest="retries",
type="int",
default=5,
help="Maximum number of retries each connection should attempt "
"(default %default times).",
help="Maximum attempts to establish a new HTTP connection. (default: %default)",
)
resume_retries: Callable[..., Option] = partial(
Option,
"--resume-retries",
dest="resume_retries",
type="int",
default=5,
help="Maximum attempts to resume or restart an incomplete download. "
"(default: %default)",
)
timeout: Callable[..., Option] = partial(
@ -415,6 +451,21 @@ def constraints() -> Option:
)
def build_constraints() -> Option:
return Option(
"--build-constraint",
dest="build_constraints",
action="append",
type="str",
default=[],
metavar="file",
help=(
"Constrain build dependencies using the given constraints file. "
"This option can be used multiple times."
),
)
def requirements() -> Option:
return Option(
"-r",
@ -545,7 +596,7 @@ platforms: Callable[..., Option] = partial(
# This was made a separate function for unit-testing purposes.
def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
def _convert_python_version(value: str) -> tuple[tuple[int, ...], str | None]:
"""
Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
@ -733,6 +784,46 @@ no_deps: Callable[..., Option] = partial(
help="Don't install package dependencies.",
)
def _handle_dependency_group(
option: Option, opt: str, value: str, parser: OptionParser
) -> None:
"""
Process a value provided for the --group option.
Splits on the rightmost ":", and validates that the path (if present) ends
in `pyproject.toml`. Defaults the path to `pyproject.toml` when one is not given.
`:` cannot appear in dependency group names, so this is a safe and simple parse.
This is an optparse.Option callback for the dependency_groups option.
"""
path, sep, groupname = value.rpartition(":")
if not sep:
path = "pyproject.toml"
else:
# check for 'pyproject.toml' filenames using pathlib
if pathlib.PurePath(path).name != "pyproject.toml":
msg = "group paths use 'pyproject.toml' filenames"
raise_option_error(parser, option=option, msg=msg)
parser.values.dependency_groups.append((path, groupname))
dependency_groups: Callable[..., Option] = partial(
Option,
"--group",
dest="dependency_groups",
default=[],
type=str,
action="callback",
callback=_handle_dependency_group,
metavar="[path:]group",
help='Install a named dependency-group from a "pyproject.toml" file. '
'If a path is given, the name of the file must be "pyproject.toml". '
'Defaults to using "pyproject.toml" in the current directory.',
)
ignore_requires_python: Callable[..., Option] = partial(
Option,
"--ignore-requires-python",
@ -758,62 +849,16 @@ check_build_deps: Callable[..., Option] = partial(
dest="check_build_deps",
action="store_true",
default=False,
help="Check the build dependencies when PEP517 is used.",
help="Check the build dependencies.",
)
def _handle_no_use_pep517(
option: Option, opt: str, value: str, parser: OptionParser
) -> None:
"""
Process a value provided for the --no-use-pep517 option.
This is an optparse.Option callback for the no_use_pep517 option.
"""
# Since --no-use-pep517 doesn't accept arguments, the value argument
# will be None if --no-use-pep517 is passed via the command-line.
# However, the value can be non-None if the option is triggered e.g.
# by an environment variable, for example "PIP_NO_USE_PEP517=true".
if value is not None:
msg = """A value was passed for --no-use-pep517,
probably using either the PIP_NO_USE_PEP517 environment variable
or the "no-use-pep517" config file option. Use an appropriate value
of the PIP_USE_PEP517 environment variable or the "use-pep517"
config file option instead.
"""
raise_option_error(parser, option=option, msg=msg)
# If user doesn't wish to use pep517, we check if setuptools and wheel are installed
# and raise error if it is not.
packages = ("setuptools", "wheel")
if not all(importlib.util.find_spec(package) for package in packages):
msg = (
f"It is not possible to use --no-use-pep517 "
f"without {' and '.join(packages)} installed."
)
raise_option_error(parser, option=option, msg=msg)
# Otherwise, --no-use-pep517 was passed via the command-line.
parser.values.use_pep517 = False
use_pep517: Any = partial(
Option,
"--use-pep517",
dest="use_pep517",
action="store_true",
default=None,
help="Use PEP 517 for building source distributions "
"(use --no-use-pep517 to force legacy behaviour).",
)
no_use_pep517: Any = partial(
Option,
"--no-use-pep517",
dest="use_pep517",
action="callback",
callback=_handle_no_use_pep517,
default=None,
default=True,
help=SUPPRESS_HELP,
)
@ -846,30 +891,11 @@ config_settings: Callable[..., Option] = partial(
action="callback",
callback=_handle_config_settings,
metavar="settings",
help="Configuration settings to be passed to the PEP 517 build backend. "
help="Configuration settings to be passed to the build backend. "
"Settings take the form KEY=VALUE. Use multiple --config-settings options "
"to pass multiple keys to the backend.",
)
build_options: Callable[..., Option] = partial(
Option,
"--build-option",
dest="build_options",
metavar="options",
action="append",
help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
)
global_options: Callable[..., Option] = partial(
Option,
"--global-option",
dest="global_options",
action="append",
metavar="options",
help="Extra global options to be supplied to the setup.py "
"call before the install or bdist_wheel command.",
)
no_clean: Callable[..., Option] = partial(
Option,
"--no-clean",
@ -887,6 +913,14 @@ pre: Callable[..., Option] = partial(
"pip only finds stable versions.",
)
json: Callable[..., Option] = partial(
Option,
"--json",
action="store_true",
default=False,
help="Output data in a machine-readable JSON format.",
)
disable_pip_version_check: Callable[..., Option] = partial(
Option,
"--disable-pip-version-check",
@ -990,7 +1024,7 @@ no_python_version_warning: Callable[..., Option] = partial(
dest="no_python_version_warning",
action="store_true",
default=False,
help="Silence deprecation warnings for upcoming unsupported Pythons.",
help=SUPPRESS_HELP, # No-op, a hold-over from the Python 2->3 transition.
)
@ -1009,6 +1043,7 @@ use_new_feature: Callable[..., Option] = partial(
default=[],
choices=[
"fast-deps",
"build-constraint",
]
+ ALWAYS_ENABLED_FEATURES,
help="Enable new functionality, that may be backward incompatible.",
@ -1028,12 +1063,11 @@ use_deprecated_feature: Callable[..., Option] = partial(
help=("Enable deprecated functionality, that will be removed in the future."),
)
##########
# groups #
##########
general_group: Dict[str, Any] = {
general_group: dict[str, Any] = {
"name": "General Options",
"options": [
help_,
@ -1061,10 +1095,11 @@ general_group: Dict[str, Any] = {
no_python_version_warning,
use_new_feature,
use_deprecated_feature,
resume_retries,
],
}
index_group: Dict[str, Any] = {
index_group: dict[str, Any] = {
"name": "Package Index Options",
"options": [
index_url,

View File

@ -1,5 +1,6 @@
from contextlib import ExitStack, contextmanager
from typing import ContextManager, Generator, TypeVar
from collections.abc import Generator
from contextlib import AbstractContextManager, ExitStack, contextmanager
from typing import TypeVar
_T = TypeVar("_T", covariant=True)
@ -21,7 +22,7 @@ class CommandContextMixIn:
finally:
self._in_main_context = False
def enter_context(self, context_provider: ContextManager[_T]) -> _T:
def enter_context(self, context_provider: AbstractContextManager[_T]) -> _T:
assert self._in_main_context
return self._main_context.enter_context(context_provider)

View File

@ -6,11 +6,14 @@ so commands which don't always hit the network (e.g. list w/o --outdated or
--uptodate) don't need waste time importing PipSession and friends.
"""
from __future__ import annotations
import logging
import os
import sys
from functools import lru_cache
from optparse import Values
from typing import TYPE_CHECKING, List, Optional
from typing import TYPE_CHECKING
from pip._vendor import certifi
@ -25,7 +28,8 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
def _create_truststore_ssl_context() -> Optional["SSLContext"]:
@lru_cache
def _create_truststore_ssl_context() -> SSLContext | None:
if sys.version_info < (3, 10):
logger.debug("Disabling truststore because Python version isn't 3.10+")
return None
@ -54,10 +58,10 @@ class SessionCommandMixin(CommandContextMixIn):
def __init__(self) -> None:
super().__init__()
self._session: Optional["PipSession"] = None
self._session: PipSession | None = None
@classmethod
def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
def _get_index_urls(cls, options: Values) -> list[str] | None:
"""Return a list of index urls from user-provided options."""
index_urls = []
if not getattr(options, "no_index", False):
@ -70,7 +74,7 @@ class SessionCommandMixin(CommandContextMixIn):
# Return None rather than an empty list
return index_urls or None
def get_default_session(self, options: Values) -> "PipSession":
def get_default_session(self, options: Values) -> PipSession:
"""Get a default-managed session."""
if self._session is None:
self._session = self.enter_context(self._build_session(options))
@ -83,9 +87,9 @@ class SessionCommandMixin(CommandContextMixIn):
def _build_session(
self,
options: Values,
retries: Optional[int] = None,
timeout: Optional[int] = None,
) -> "PipSession":
retries: int | None = None,
timeout: int | None = None,
) -> PipSession:
from pip._internal.network.session import PipSession
cache_dir = options.cache_dir
@ -123,6 +127,7 @@ class SessionCommandMixin(CommandContextMixIn):
"https": options.proxy,
}
session.trust_env = False
session.pip_proxy = options.proxy
# Determine if we can prompt the user for authentication or not
session.auth.prompting = not options.no_input
@ -131,7 +136,7 @@ class SessionCommandMixin(CommandContextMixIn):
return session
def _pip_self_version_check(session: "PipSession", options: Values) -> None:
def _pip_self_version_check(session: PipSession, options: Values) -> None:
from pip._internal.self_outdated_check import pip_self_version_check as check
check(session, options)

View File

@ -1,12 +1,12 @@
"""Primary application entrypoint.
"""
"""Primary application entrypoint."""
from __future__ import annotations
import locale
import logging
import os
import sys
import warnings
from typing import List, Optional
from pip._internal.cli.autocompletion import autocomplete
from pip._internal.cli.main_parser import parse_command
@ -44,7 +44,7 @@ logger = logging.getLogger(__name__)
# main, this should not be an issue in practice.
def main(args: Optional[List[str]] = None) -> int:
def main(args: list[str] | None = None) -> int:
if args is None:
args = sys.argv[1:]

View File

@ -1,10 +1,10 @@
"""A single place for constructing and exposing the main parser
"""
"""A single place for constructing and exposing the main parser"""
from __future__ import annotations
import os
import subprocess
import sys
from typing import List, Optional, Tuple
from pip._internal.build_env import get_runnable_pip
from pip._internal.cli import cmdoptions
@ -47,7 +47,7 @@ def create_main_parser() -> ConfigOptionParser:
return parser
def identify_python_interpreter(python: str) -> Optional[str]:
def identify_python_interpreter(python: str) -> str | None:
# If the named file exists, use it.
# If it's a directory, assume it's a virtual environment and
# look for the environment's Python executable.
@ -66,7 +66,7 @@ def identify_python_interpreter(python: str) -> Optional[str]:
return None
def parse_command(args: List[str]) -> Tuple[str, List[str]]:
def parse_command(args: list[str]) -> tuple[str, list[str]]:
parser = create_main_parser()
# Note: parser calls disable_interspersed_args(), so the result of this

View File

@ -1,12 +1,15 @@
"""Base option parser setup"""
from __future__ import annotations
import logging
import optparse
import shutil
import sys
import textwrap
from collections.abc import Generator
from contextlib import suppress
from typing import Any, Dict, Generator, List, Optional, Tuple
from typing import Any, NoReturn
from pip._internal.cli.status_codes import UNKNOWN_ERROR
from pip._internal.configuration import Configuration, ConfigurationError
@ -67,7 +70,7 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " "))
return msg
def format_description(self, description: Optional[str]) -> str:
def format_description(self, description: str | None) -> str:
# leave full control over description to us
if description:
if hasattr(self.parser, "main"):
@ -85,7 +88,7 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
else:
return ""
def format_epilog(self, epilog: Optional[str]) -> str:
def format_epilog(self, epilog: str | None) -> str:
# leave full control over epilog to us
if epilog:
return epilog
@ -142,7 +145,7 @@ class CustomOptionParser(optparse.OptionParser):
return group
@property
def option_list_all(self) -> List[optparse.Option]:
def option_list_all(self) -> list[optparse.Option]:
"""Get a list of all options, including those in option groups."""
res = self.option_list[:]
for i in self.option_groups:
@ -177,19 +180,21 @@ class ConfigOptionParser(CustomOptionParser):
def _get_ordered_configuration_items(
self,
) -> Generator[Tuple[str, Any], None, None]:
) -> Generator[tuple[str, Any], None, None]:
# Configuration gives keys in an unordered manner. Order them.
override_order = ["global", self.name, ":env:"]
# Pool the options into different groups
section_items: Dict[str, List[Tuple[str, Any]]] = {
section_items: dict[str, list[tuple[str, Any]]] = {
name: [] for name in override_order
}
for section_key, val in self.config.items():
for _, value in self.config.items(): # noqa: PERF102
for section_key, val in value.items():
# ignore empty values
if not val:
logger.debug(
"Ignoring configuration key '%s' as it's value is empty.",
"Ignoring configuration key '%s' as its value is empty.",
section_key,
)
continue
@ -200,10 +205,9 @@ class ConfigOptionParser(CustomOptionParser):
# Yield each group in their override order
for section in override_order:
for key, val in section_items[section]:
yield key, val
yield from section_items[section]
def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
def _update_defaults(self, defaults: dict[str, Any]) -> dict[str, Any]:
"""Updates the given defaults with values from the config files and
the environ. Does a little special handling for certain types of
options (lists)."""
@ -289,6 +293,6 @@ class ConfigOptionParser(CustomOptionParser):
defaults[option.dest] = option.check_value(opt_str, default)
return optparse.Values(defaults)
def error(self, msg: str) -> None:
def error(self, msg: str) -> NoReturn:
self.print_usage(sys.stderr)
self.exit(UNKNOWN_ERROR, f"{msg}\n")

View File

@ -1,11 +1,15 @@
from __future__ import annotations
import functools
import sys
from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
from collections.abc import Generator, Iterable, Iterator
from typing import Callable, Literal, TypeVar
from pip._vendor.rich.progress import (
BarColumn,
DownloadColumn,
FileSizeColumn,
MofNCompleteColumn,
Progress,
ProgressColumn,
SpinnerColumn,
@ -16,22 +20,26 @@ from pip._vendor.rich.progress import (
)
from pip._internal.cli.spinners import RateLimiter
from pip._internal.utils.logging import get_indentation
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.logging import get_console, get_indentation
DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
T = TypeVar("T")
ProgressRenderer = Callable[[Iterable[T]], Iterator[T]]
BarType = Literal["on", "off", "raw"]
def _rich_progress_bar(
def _rich_download_progress_bar(
iterable: Iterable[bytes],
*,
bar_type: str,
size: int,
bar_type: BarType,
size: int | None,
initial_progress: int | None = None,
) -> Generator[bytes, None, None]:
assert bar_type == "on", "This should only be used in the default mode."
if not size:
total = float("inf")
columns: Tuple[ProgressColumn, ...] = (
columns: tuple[ProgressColumn, ...] = (
TextColumn("[progress.description]{task.description}"),
SpinnerColumn("line", speed=1.5),
FileSizeColumn(),
@ -45,28 +53,56 @@ def _rich_progress_bar(
BarColumn(),
DownloadColumn(),
TransferSpeedColumn(),
TextColumn("eta"),
TimeRemainingColumn(),
TextColumn("{task.fields[time_description]}"),
TimeRemainingColumn(elapsed_when_finished=True),
)
progress = Progress(*columns, refresh_per_second=5)
task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
task_id = progress.add_task(
" " * (get_indentation() + 2), total=total, time_description="eta"
)
if initial_progress is not None:
progress.update(task_id, advance=initial_progress)
with progress:
for chunk in iterable:
yield chunk
progress.update(task_id, advance=len(chunk))
progress.update(task_id, time_description="")
def _rich_install_progress_bar(
iterable: Iterable[InstallRequirement], *, total: int
) -> Iterator[InstallRequirement]:
columns = (
TextColumn("{task.fields[indent]}"),
BarColumn(),
MofNCompleteColumn(),
TextColumn("{task.description}"),
)
console = get_console()
bar = Progress(*columns, refresh_per_second=6, console=console, transient=True)
# Hiding the progress bar at initialization forces a refresh cycle to occur
# until the bar appears, avoiding very short flashes.
task = bar.add_task("", total=total, indent=" " * get_indentation(), visible=False)
with bar:
for req in iterable:
bar.update(task, description=rf"\[{req.name}]", visible=True)
yield req
bar.advance(task)
def _raw_progress_bar(
iterable: Iterable[bytes],
*,
size: Optional[int],
size: int | None,
initial_progress: int | None = None,
) -> Generator[bytes, None, None]:
def write_progress(current: int, total: int) -> None:
sys.stdout.write("Progress %d of %d\n" % (current, total))
sys.stdout.write(f"Progress {current} of {total}\n")
sys.stdout.flush()
current = 0
current = initial_progress or 0
total = size or 0
rate_limiter = RateLimiter(0.25)
@ -80,15 +116,36 @@ def _raw_progress_bar(
def get_download_progress_renderer(
*, bar_type: str, size: Optional[int] = None
) -> DownloadProgressRenderer:
*, bar_type: BarType, size: int | None = None, initial_progress: int | None = None
) -> ProgressRenderer[bytes]:
"""Get an object that can be used to render the download progress.
Returns a callable, that takes an iterable to "wrap".
"""
if bar_type == "on":
return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
return functools.partial(
_rich_download_progress_bar,
bar_type=bar_type,
size=size,
initial_progress=initial_progress,
)
elif bar_type == "raw":
return functools.partial(_raw_progress_bar, size=size)
return functools.partial(
_raw_progress_bar,
size=size,
initial_progress=initial_progress,
)
else:
return iter # no-op, when passed an iterator
def get_install_progress_renderer(
*, bar_type: BarType, total: int
) -> ProgressRenderer[InstallRequirement]:
"""Get an object that can be used to render the install progress.
Returns a callable, that takes an iterable to "wrap".
"""
if bar_type == "on":
return functools.partial(_rich_install_progress_bar, total=total)
else:
return iter

View File

@ -5,11 +5,15 @@ need PackageFinder capability don't unnecessarily import the
PackageFinder machinery and all its vendored dependencies, etc.
"""
from __future__ import annotations
import logging
import os
from functools import partial
from optparse import Values
from typing import Any, List, Optional, Tuple
from typing import Any, Callable, TypeVar
from pip._internal.build_env import SubprocessBuildEnvironmentInstaller
from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.index_command import IndexGroupCommand
@ -28,6 +32,7 @@ from pip._internal.req.constructors import (
install_req_from_parsed_requirement,
install_req_from_req_string,
)
from pip._internal.req.req_dependency_group import parse_dependency_groups
from pip._internal.req.req_file import parse_requirements
from pip._internal.req.req_install import InstallRequirement
from pip._internal.resolution.base import BaseResolver
@ -40,6 +45,16 @@ from pip._internal.utils.temp_dir import (
logger = logging.getLogger(__name__)
def should_ignore_regular_constraints(options: Values) -> bool:
"""
Check if regular constraints should be ignored because
we are in a isolated build process and build constraints
feature is enabled but no build constraints were passed.
"""
return os.environ.get("_PIP_IN_BUILD_IGNORE_CONSTRAINTS") == "1"
KEEPABLE_TEMPDIR_TYPES = [
tempdir_kinds.BUILD_ENV,
tempdir_kinds.EPHEM_WHEEL_CACHE,
@ -47,7 +62,12 @@ KEEPABLE_TEMPDIR_TYPES = [
]
def with_cleanup(func: Any) -> Any:
_CommandT = TypeVar("_CommandT", bound="RequirementCommand")
def with_cleanup(
func: Callable[[_CommandT, Values, list[str]], int],
) -> Callable[[_CommandT, Values, list[str]], int]:
"""Decorator for common logic related to managing temporary
directories.
"""
@ -56,9 +76,7 @@ def with_cleanup(func: Any) -> Any:
for t in KEEPABLE_TEMPDIR_TYPES:
registry.set_delete(t, False)
def wrapper(
self: RequirementCommand, options: Values, args: List[Any]
) -> Optional[int]:
def wrapper(self: _CommandT, options: Values, args: list[str]) -> int:
assert self.tempdir_registry is not None
if options.no_clean:
configure_tempdir_registry(self.tempdir_registry)
@ -79,6 +97,7 @@ class RequirementCommand(IndexGroupCommand):
def __init__(self, *args: Any, **kw: Any) -> None:
super().__init__(*args, **kw)
self.cmd_opts.add_option(cmdoptions.dependency_groups())
self.cmd_opts.add_option(cmdoptions.no_clean())
@staticmethod
@ -98,7 +117,7 @@ class RequirementCommand(IndexGroupCommand):
session: PipSession,
finder: PackageFinder,
use_user_site: bool,
download_dir: Optional[str] = None,
download_dir: str | None = None,
verbosity: int = 0,
) -> RequirementPreparer:
"""
@ -127,11 +146,22 @@ class RequirementCommand(IndexGroupCommand):
"fast-deps has no effect when used with the legacy resolver."
)
# Handle build constraints
build_constraints = getattr(options, "build_constraints", [])
build_constraint_feature_enabled = (
"build-constraint" in options.features_enabled
)
return RequirementPreparer(
build_dir=temp_build_dir_path,
src_dir=options.src_dir,
download_dir=download_dir,
build_isolation=options.build_isolation,
build_isolation_installer=SubprocessBuildEnvironmentInstaller(
finder,
build_constraints=build_constraints,
build_constraint_feature_enabled=build_constraint_feature_enabled,
),
check_build_deps=options.check_build_deps,
build_tracker=build_tracker,
session=session,
@ -142,6 +172,7 @@ class RequirementCommand(IndexGroupCommand):
lazy_wheel=lazy_wheel,
verbosity=verbosity,
legacy_resolver=legacy_resolver,
resume_retries=options.resume_retries,
)
@classmethod
@ -150,14 +181,13 @@ class RequirementCommand(IndexGroupCommand):
preparer: RequirementPreparer,
finder: PackageFinder,
options: Values,
wheel_cache: Optional[WheelCache] = None,
wheel_cache: WheelCache | None = None,
use_user_site: bool = False,
ignore_installed: bool = True,
ignore_requires_python: bool = False,
force_reinstall: bool = False,
upgrade_strategy: str = "to-satisfy-only",
use_pep517: Optional[bool] = None,
py_version_info: Optional[Tuple[int, ...]] = None,
py_version_info: tuple[int, ...] | None = None,
) -> BaseResolver:
"""
Create a Resolver instance for the given parameters.
@ -165,7 +195,6 @@ class RequirementCommand(IndexGroupCommand):
make_install_req = partial(
install_req_from_req_string,
isolated=options.isolated_mode,
use_pep517=use_pep517,
)
resolver_variant = cls.determine_resolver_variant(options)
# The long import name and duplicated invocation is needed to convince
@ -205,15 +234,17 @@ class RequirementCommand(IndexGroupCommand):
def get_requirements(
self,
args: List[str],
args: list[str],
options: Values,
finder: PackageFinder,
session: PipSession,
) -> List[InstallRequirement]:
) -> list[InstallRequirement]:
"""
Parse command-line arguments into the corresponding requirements.
"""
requirements: List[InstallRequirement] = []
requirements: list[InstallRequirement] = []
if not should_ignore_regular_constraints(options):
for filename in options.constraints:
for parsed_req in parse_requirements(
filename,
@ -234,18 +265,25 @@ class RequirementCommand(IndexGroupCommand):
req,
comes_from=None,
isolated=options.isolated_mode,
use_pep517=options.use_pep517,
user_supplied=True,
config_settings=getattr(options, "config_settings", None),
)
requirements.append(req_to_add)
if options.dependency_groups:
for req in parse_dependency_groups(options.dependency_groups):
req_to_add = install_req_from_req_string(
req,
isolated=options.isolated_mode,
user_supplied=True,
)
requirements.append(req_to_add)
for req in options.editables:
req_to_add = install_req_from_editable(
req,
user_supplied=True,
isolated=options.isolated_mode,
use_pep517=options.use_pep517,
config_settings=getattr(options, "config_settings", None),
)
requirements.append(req_to_add)
@ -258,7 +296,6 @@ class RequirementCommand(IndexGroupCommand):
req_to_add = install_req_from_parsed_requirement(
parsed_req,
isolated=options.isolated_mode,
use_pep517=options.use_pep517,
user_supplied=True,
config_settings=(
parsed_req.options.get("config_settings")
@ -272,7 +309,12 @@ class RequirementCommand(IndexGroupCommand):
if any(req.has_hash_options for req in requirements):
options.require_hashes = True
if not (args or options.editables or options.requirements):
if not (
args
or options.editables
or options.requirements
or options.dependency_groups
):
opts = {"name": self.name}
if options.find_links:
raise CommandError(
@ -304,8 +346,8 @@ class RequirementCommand(IndexGroupCommand):
self,
options: Values,
session: PipSession,
target_python: Optional[TargetPython] = None,
ignore_requires_python: Optional[bool] = None,
target_python: TargetPython | None = None,
ignore_requires_python: bool | None = None,
) -> PackageFinder:
"""
Create a package finder appropriate to this requirement command.

View File

@ -1,15 +1,31 @@
from __future__ import annotations
import contextlib
import itertools
import logging
import sys
import time
from typing import IO, Generator, Optional
from collections.abc import Generator
from typing import IO, Final
from pip._vendor.rich.console import (
Console,
ConsoleOptions,
RenderableType,
RenderResult,
)
from pip._vendor.rich.live import Live
from pip._vendor.rich.measure import Measurement
from pip._vendor.rich.text import Text
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
from pip._internal.utils.logging import get_console, get_indentation
logger = logging.getLogger(__name__)
SPINNER_CHARS: Final = r"-\|/"
SPINS_PER_SECOND: Final = 8
class SpinnerInterface:
def spin(self) -> None:
@ -23,10 +39,10 @@ class InteractiveSpinner(SpinnerInterface):
def __init__(
self,
message: str,
file: Optional[IO[str]] = None,
spin_chars: str = "-\\|/",
file: IO[str] | None = None,
spin_chars: str = SPINNER_CHARS,
# Empirically, 8 updates/second looks nice
min_update_interval_seconds: float = 0.125,
min_update_interval_seconds: float = 1 / SPINS_PER_SECOND,
):
self._message = message
if file is None:
@ -136,6 +152,66 @@ def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
spinner.finish("done")
class _PipRichSpinner:
"""
Custom rich spinner that matches the style of the legacy spinners.
(*) Updates will be handled in a background thread by a rich live panel
which will call render() automatically at the appropriate time.
"""
def __init__(self, label: str) -> None:
self.label = label
self._spin_cycle = itertools.cycle(SPINNER_CHARS)
self._spinner_text = ""
self._finished = False
self._indent = get_indentation() * " "
def __rich_console__(
self, console: Console, options: ConsoleOptions
) -> RenderResult:
yield self.render()
def __rich_measure__(
self, console: Console, options: ConsoleOptions
) -> Measurement:
text = self.render()
return Measurement.get(console, options, text)
def render(self) -> RenderableType:
if not self._finished:
self._spinner_text = next(self._spin_cycle)
return Text.assemble(self._indent, self.label, " ... ", self._spinner_text)
def finish(self, status: str) -> None:
"""Stop spinning and set a final status message."""
self._spinner_text = status
self._finished = True
@contextlib.contextmanager
def open_rich_spinner(label: str, console: Console | None = None) -> Generator[None]:
if not logger.isEnabledFor(logging.INFO):
# Don't show spinner if --quiet is given.
yield
return
console = console or get_console()
spinner = _PipRichSpinner(label)
with Live(spinner, refresh_per_second=SPINS_PER_SECOND, console=console):
try:
yield
except KeyboardInterrupt:
spinner.finish("canceled")
raise
except Exception:
spinner.finish("error")
raise
else:
spinner.finish("done")
HIDE_CURSOR = "\x1b[?25l"
SHOW_CURSOR = "\x1b[?25h"

View File

@ -2,9 +2,11 @@
Package containing all pip commands
"""
from __future__ import annotations
import importlib
from collections import namedtuple
from typing import Any, Dict, Optional
from typing import Any
from pip._internal.cli.base_command import Command
@ -17,12 +19,17 @@ CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")
# Even though the module path starts with the same "pip._internal.commands"
# prefix, the full path makes testing easier (specifically when modifying
# `commands_dict` in test setup / teardown).
commands_dict: Dict[str, CommandInfo] = {
commands_dict: dict[str, CommandInfo] = {
"install": CommandInfo(
"pip._internal.commands.install",
"InstallCommand",
"Install packages.",
),
"lock": CommandInfo(
"pip._internal.commands.lock",
"LockCommand",
"Generate a lock file.",
),
"download": CommandInfo(
"pip._internal.commands.download",
"DownloadCommand",
@ -118,7 +125,7 @@ def create_command(name: str, **kwargs: Any) -> Command:
return command
def get_similar_commands(name: str) -> Optional[str]:
def get_similar_commands(name: str) -> str | None:
"""Command name auto-correct."""
from difflib import get_close_matches

View File

@ -1,13 +1,14 @@
import os
import textwrap
from optparse import Values
from typing import Any, List
from typing import Callable
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils import filesystem
from pip._internal.utils.logging import getLogger
from pip._internal.utils.misc import format_size
logger = getLogger(__name__)
@ -48,8 +49,8 @@ class CacheCommand(Command):
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
handlers = {
def handler_map(self) -> dict[str, Callable[[Values, list[str]], None]]:
return {
"dir": self.get_cache_dir,
"info": self.get_cache_info,
"list": self.list_cache_items,
@ -57,15 +58,18 @@ class CacheCommand(Command):
"purge": self.purge_cache,
}
def run(self, options: Values, args: list[str]) -> int:
handler_map = self.handler_map()
if not options.cache_dir:
logger.error("pip cache commands can not function since cache is disabled.")
return ERROR
# Determine action
if not args or args[0] not in handlers:
if not args or args[0] not in handler_map:
logger.error(
"Need an action (%s) to perform.",
", ".join(sorted(handlers)),
", ".join(sorted(handler_map)),
)
return ERROR
@ -73,20 +77,20 @@ class CacheCommand(Command):
# Error handling happens here, not in the action-handlers.
try:
handlers[action](options, args[1:])
handler_map[action](options, args[1:])
except PipError as e:
logger.error(e.args[0])
return ERROR
return SUCCESS
def get_cache_dir(self, options: Values, args: List[Any]) -> None:
def get_cache_dir(self, options: Values, args: list[str]) -> None:
if args:
raise CommandError("Too many arguments")
logger.info(options.cache_dir)
def get_cache_info(self, options: Values, args: List[Any]) -> None:
def get_cache_info(self, options: Values, args: list[str]) -> None:
if args:
raise CommandError("Too many arguments")
@ -128,7 +132,7 @@ class CacheCommand(Command):
logger.info(message)
def list_cache_items(self, options: Values, args: List[Any]) -> None:
def list_cache_items(self, options: Values, args: list[str]) -> None:
if len(args) > 1:
raise CommandError("Too many arguments")
@ -143,7 +147,7 @@ class CacheCommand(Command):
else:
self.format_for_abspath(files)
def format_for_human(self, files: List[str]) -> None:
def format_for_human(self, files: list[str]) -> None:
if not files:
logger.info("No locally built wheels cached.")
return
@ -156,11 +160,11 @@ class CacheCommand(Command):
logger.info("Cache contents:\n")
logger.info("\n".join(sorted(results)))
def format_for_abspath(self, files: List[str]) -> None:
def format_for_abspath(self, files: list[str]) -> None:
if files:
logger.info("\n".join(sorted(files)))
def remove_cache_items(self, options: Values, args: List[Any]) -> None:
def remove_cache_items(self, options: Values, args: list[str]) -> None:
if len(args) > 1:
raise CommandError("Too many arguments")
@ -180,12 +184,14 @@ class CacheCommand(Command):
if not files:
logger.warning(no_matching_msg)
bytes_removed = 0
for filename in files:
bytes_removed += os.stat(filename).st_size
os.unlink(filename)
logger.verbose("Removed %s", filename)
logger.info("Files removed: %s", len(files))
logger.info("Files removed: %s (%s)", len(files), format_size(bytes_removed))
def purge_cache(self, options: Values, args: List[Any]) -> None:
def purge_cache(self, options: Values, args: list[str]) -> None:
if args:
raise CommandError("Too many arguments")
@ -194,14 +200,14 @@ class CacheCommand(Command):
def _cache_dir(self, options: Values, subdir: str) -> str:
return os.path.join(options.cache_dir, subdir)
def _find_http_files(self, options: Values) -> List[str]:
def _find_http_files(self, options: Values) -> list[str]:
old_http_dir = self._cache_dir(options, "http")
new_http_dir = self._cache_dir(options, "http-v2")
return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
new_http_dir, "*"
)
def _find_wheels(self, options: Values, pattern: str) -> List[str]:
def _find_wheels(self, options: Values, pattern: str) -> list[str]:
wheel_dir = self._cache_dir(options, "wheels")
# The wheel filename format, as specified in PEP 427, is:

View File

@ -1,6 +1,5 @@
import logging
from optparse import Values
from typing import List
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
@ -23,7 +22,7 @@ class CheckCommand(Command):
usage = """
%prog [options]"""
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
package_set, parsing_probs = create_package_set_from_installed()
missing, conflicting = check_package_set(package_set)
unsupported = list(

View File

@ -1,7 +1,6 @@
import sys
import textwrap
from optparse import Values
from typing import List
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
@ -38,12 +37,18 @@ COMPLETION_SCRIPTS = {
""",
"fish": """
function __fish_complete_pip
set -lx COMP_WORDS (commandline -o) ""
set -lx COMP_CWORD ( \\
math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
)
set -lx COMP_WORDS \\
(commandline --current-process --tokenize --cut-at-cursor) \\
(commandline --current-token --cut-at-cursor)
set -lx COMP_CWORD (math (count $COMP_WORDS) - 1)
set -lx PIP_AUTO_COMPLETE 1
string split \\ -- (eval $COMP_WORDS[1])
set -l completions
if string match -q '2.*' $version
set completions (eval $COMP_WORDS[1])
else
set completions ($COMP_WORDS[1])
end
string split \\ -- $completions
end
complete -fa "(__fish_complete_pip)" -c {prog}
""",
@ -113,7 +118,7 @@ class CompletionCommand(Command):
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
"""Prints the completion code of the given shell"""
shells = COMPLETION_SCRIPTS.keys()
shell_options = ["--" + shell for shell in sorted(shells)]

View File

@ -1,8 +1,10 @@
from __future__ import annotations
import logging
import os
import subprocess
from optparse import Values
from typing import Any, List, Optional
from typing import Any, Callable
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
@ -93,8 +95,8 @@ class ConfigurationCommand(Command):
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
handlers = {
def handler_map(self) -> dict[str, Callable[[Values, list[str]], None]]:
return {
"list": self.list_values,
"edit": self.open_in_editor,
"get": self.get_name,
@ -103,11 +105,14 @@ class ConfigurationCommand(Command):
"debug": self.list_config_values,
}
def run(self, options: Values, args: list[str]) -> int:
handler_map = self.handler_map()
# Determine action
if not args or args[0] not in handlers:
if not args or args[0] not in handler_map:
logger.error(
"Need an action (%s) to perform.",
", ".join(sorted(handlers)),
", ".join(sorted(handler_map)),
)
return ERROR
@ -131,14 +136,14 @@ class ConfigurationCommand(Command):
# Error handling happens here, not in the action-handlers.
try:
handlers[action](options, args[1:])
handler_map[action](options, args[1:])
except PipError as e:
logger.error(e.args[0])
return ERROR
return SUCCESS
def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
def _determine_file(self, options: Values, need_value: bool) -> Kind | None:
file_options = [
key
for key, value in (
@ -168,31 +173,32 @@ class ConfigurationCommand(Command):
"(--user, --site, --global) to perform."
)
def list_values(self, options: Values, args: List[str]) -> None:
def list_values(self, options: Values, args: list[str]) -> None:
self._get_n_args(args, "list", n=0)
for key, value in sorted(self.configuration.items()):
for key, value in sorted(value.items()):
write_output("%s=%r", key, value)
def get_name(self, options: Values, args: List[str]) -> None:
def get_name(self, options: Values, args: list[str]) -> None:
key = self._get_n_args(args, "get [name]", n=1)
value = self.configuration.get_value(key)
write_output("%s", value)
def set_name_value(self, options: Values, args: List[str]) -> None:
def set_name_value(self, options: Values, args: list[str]) -> None:
key, value = self._get_n_args(args, "set [name] [value]", n=2)
self.configuration.set_value(key, value)
self._save_configuration()
def unset_name(self, options: Values, args: List[str]) -> None:
def unset_name(self, options: Values, args: list[str]) -> None:
key = self._get_n_args(args, "unset [name]", n=1)
self.configuration.unset_value(key)
self._save_configuration()
def list_config_values(self, options: Values, args: List[str]) -> None:
def list_config_values(self, options: Values, args: list[str]) -> None:
"""List config key-value pairs across different config files"""
self._get_n_args(args, "debug", n=0)
@ -206,13 +212,15 @@ class ConfigurationCommand(Command):
file_exists = os.path.exists(fname)
write_output("%s, exists: %r", fname, file_exists)
if file_exists:
self.print_config_file_values(variant)
self.print_config_file_values(variant, fname)
def print_config_file_values(self, variant: Kind) -> None:
def print_config_file_values(self, variant: Kind, fname: str) -> None:
"""Get key-value pairs from the file of a variant"""
for name, value in self.configuration.get_values_in_config(variant).items():
with indent_log():
write_output("%s: %s", name, value)
if name == fname:
for confname, confvalue in value.items():
write_output("%s: %s", confname, confvalue)
def print_env_var_values(self) -> None:
"""Get key-values pairs present as environment variables"""
@ -222,7 +230,7 @@ class ConfigurationCommand(Command):
env_var = f"PIP_{key.upper()}"
write_output("%s=%r", env_var, value)
def open_in_editor(self, options: Values, args: List[str]) -> None:
def open_in_editor(self, options: Values, args: list[str]) -> None:
editor = self._determine_editor(options)
fname = self.configuration.get_file_to_edit()
@ -244,7 +252,7 @@ class ConfigurationCommand(Command):
except subprocess.CalledProcessError as e:
raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
def _get_n_args(self, args: list[str], example: str, n: int) -> Any:
"""Helper to make sure the command got the right number of arguments"""
if len(args) != n:
msg = (

View File

@ -1,10 +1,12 @@
from __future__ import annotations
import locale
import logging
import os
import sys
from optparse import Values
from types import ModuleType
from typing import Any, Dict, List, Optional
from typing import Any
import pip._vendor
from pip._vendor.certifi import where
@ -34,7 +36,7 @@ def show_sys_implementation() -> None:
show_value("name", implementation_name)
def create_vendor_txt_map() -> Dict[str, str]:
def create_vendor_txt_map() -> dict[str, str]:
with open_text_resource("pip._vendor", "vendor.txt") as f:
# Purge non version specifying lines.
# Also, remove any space prefix or suffixes (including comments).
@ -46,7 +48,7 @@ def create_vendor_txt_map() -> Dict[str, str]:
return dict(line.split("==", 1) for line in lines)
def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
def get_module_from_module_name(module_name: str) -> ModuleType | None:
# Module name can be uppercase in vendor.txt for some reason...
module_name = module_name.lower().replace("-", "_")
# PATCH: setuptools is actually only pkg_resources.
@ -64,7 +66,7 @@ def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
raise
def get_vendor_version_from_module(module_name: str) -> Optional[str]:
def get_vendor_version_from_module(module_name: str) -> str | None:
module = get_module_from_module_name(module_name)
version = getattr(module, "__version__", None)
@ -79,7 +81,7 @@ def get_vendor_version_from_module(module_name: str) -> Optional[str]:
return version
def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
def show_actual_vendor_versions(vendor_txt_versions: dict[str, str]) -> None:
"""Log the actual version and print extra info if there is
a conflict or if the actual version could not be imported.
"""
@ -169,7 +171,7 @@ class DebugCommand(Command):
self.parser.insert_option_group(0, self.cmd_opts)
self.parser.config.load()
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
logger.warning(
"This command is only meant for debugging. "
"Do not use this with automation for parsing and getting these "

View File

@ -1,14 +1,12 @@
import logging
import os
from optparse import Values
from typing import List
from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.operations.build.build_tracker import get_build_tracker
from pip._internal.req.req_install import check_legacy_setup_py_options
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
from pip._internal.utils.temp_dir import TempDirectory
@ -37,9 +35,9 @@ class DownloadCommand(RequirementCommand):
def add_options(self) -> None:
self.cmd_opts.add_option(cmdoptions.constraints())
self.cmd_opts.add_option(cmdoptions.build_constraints())
self.cmd_opts.add_option(cmdoptions.requirements())
self.cmd_opts.add_option(cmdoptions.no_deps())
self.cmd_opts.add_option(cmdoptions.global_options())
self.cmd_opts.add_option(cmdoptions.no_binary())
self.cmd_opts.add_option(cmdoptions.only_binary())
self.cmd_opts.add_option(cmdoptions.prefer_binary())
@ -49,7 +47,6 @@ class DownloadCommand(RequirementCommand):
self.cmd_opts.add_option(cmdoptions.progress_bar())
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
self.cmd_opts.add_option(cmdoptions.use_pep517())
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
self.cmd_opts.add_option(cmdoptions.check_build_deps())
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
@ -75,13 +72,14 @@ class DownloadCommand(RequirementCommand):
self.parser.insert_option_group(0, self.cmd_opts)
@with_cleanup
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
options.ignore_installed = True
# editable doesn't really make sense for `pip download`, but the bowels
# of the RequirementSet code require that property.
options.editables = []
cmdoptions.check_dist_restriction(options)
cmdoptions.check_build_constraints(options)
options.download_dir = normalize_path(options.download_dir)
ensure_dir(options.download_dir)
@ -105,7 +103,6 @@ class DownloadCommand(RequirementCommand):
)
reqs = self.get_requirements(args, options, finder, session)
check_legacy_setup_py_options(options, reqs)
preparer = self.make_requirement_preparer(
temp_build_dir=directory,
@ -123,7 +120,6 @@ class DownloadCommand(RequirementCommand):
finder=finder,
options=options,
ignore_requires_python=options.ignore_requires_python,
use_pep517=options.use_pep517,
py_version_info=options.python_version,
)
@ -131,15 +127,15 @@ class DownloadCommand(RequirementCommand):
requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
downloaded: List[str] = []
preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
downloaded: list[str] = []
for req in requirement_set.requirements.values():
if req.satisfied_by is None:
assert req.name is not None
preparer.save_linked_requirement(req)
downloaded.append(req.name)
preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
if downloaded:
write_output("Successfully downloaded %s", " ".join(downloaded))

View File

@ -1,6 +1,5 @@
import sys
from optparse import Values
from typing import AbstractSet, List
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
@ -13,7 +12,7 @@ def _should_suppress_build_backends() -> bool:
return sys.version_info < (3, 12)
def _dev_pkgs() -> AbstractSet[str]:
def _dev_pkgs() -> set[str]:
pkgs = {"pip"}
if _should_suppress_build_backends():
@ -32,7 +31,6 @@ class FreezeCommand(Command):
ignore_require_venv = True
usage = """
%prog [options]"""
log_streams = ("ext://sys.stderr", "ext://sys.stderr")
def add_options(self) -> None:
self.cmd_opts.add_option(
@ -86,7 +84,7 @@ class FreezeCommand(Command):
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
skip = set(stdlib_pkgs)
if not options.freeze_all:
skip.update(_dev_pkgs())

View File

@ -2,7 +2,6 @@ import hashlib
import logging
import sys
from optparse import Values
from typing import List
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
@ -37,7 +36,7 @@ class HashCommand(Command):
)
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
if not args:
self.parser.print_usage(sys.stderr)
return ERROR

View File

@ -1,5 +1,4 @@
from optparse import Values
from typing import List
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
@ -13,7 +12,7 @@ class HelpCommand(Command):
%prog <command>"""
ignore_require_venv = True
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
from pip._internal.commands import (
commands_dict,
create_command,

View File

@ -1,13 +1,20 @@
from __future__ import annotations
import json
import logging
from collections.abc import Iterable
from optparse import Values
from typing import Any, Iterable, List, Optional
from typing import Any, Callable
from pip._vendor.packaging.version import Version
from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import IndexGroupCommand
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.commands.search import print_dist_installation_info
from pip._internal.commands.search import (
get_installed_distribution,
print_dist_installation_info,
)
from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
@ -34,6 +41,7 @@ class IndexCommand(IndexGroupCommand):
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
self.cmd_opts.add_option(cmdoptions.pre())
self.cmd_opts.add_option(cmdoptions.json())
self.cmd_opts.add_option(cmdoptions.no_binary())
self.cmd_opts.add_option(cmdoptions.only_binary())
@ -45,22 +53,19 @@ class IndexCommand(IndexGroupCommand):
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
handlers = {
def handler_map(self) -> dict[str, Callable[[Values, list[str]], None]]:
return {
"versions": self.get_available_package_versions,
}
logger.warning(
"pip index is currently an experimental command. "
"It may be removed/changed in a future release "
"without prior warning."
)
def run(self, options: Values, args: list[str]) -> int:
handler_map = self.handler_map()
# Determine action
if not args or args[0] not in handlers:
if not args or args[0] not in handler_map:
logger.error(
"Need an action (%s) to perform.",
", ".join(sorted(handlers)),
", ".join(sorted(handler_map)),
)
return ERROR
@ -68,7 +73,7 @@ class IndexCommand(IndexGroupCommand):
# Error handling happens here, not in the action-handlers.
try:
handlers[action](options, args[1:])
handler_map[action](options, args[1:])
except PipError as e:
logger.error(e.args[0])
return ERROR
@ -79,8 +84,8 @@ class IndexCommand(IndexGroupCommand):
self,
options: Values,
session: PipSession,
target_python: Optional[TargetPython] = None,
ignore_requires_python: Optional[bool] = None,
target_python: TargetPython | None = None,
ignore_requires_python: bool | None = None,
) -> PackageFinder:
"""
Create a package finder appropriate to the index command.
@ -100,7 +105,7 @@ class IndexCommand(IndexGroupCommand):
target_python=target_python,
)
def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
def get_available_package_versions(self, options: Values, args: list[Any]) -> None:
if len(args) != 1:
raise CommandError("You need to specify exactly one argument")
@ -134,6 +139,21 @@ class IndexCommand(IndexGroupCommand):
formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
latest = formatted_versions[0]
dist = get_installed_distribution(query)
if options.json:
structured_output = {
"name": query,
"versions": formatted_versions,
"latest": latest,
}
if dist is not None:
structured_output["installed_version"] = str(dist.version)
write_output(json.dumps(structured_output))
else:
write_output(f"{query} ({latest})")
write_output("Available versions: {}".format(", ".join(formatted_versions)))
print_dist_installation_info(query, latest)
print_dist_installation_info(latest, dist)

View File

@ -1,6 +1,6 @@
import logging
from optparse import Values
from typing import Any, Dict, List
from typing import Any
from pip._vendor.packaging.markers import default_environment
from pip._vendor.rich import print_json
@ -45,7 +45,7 @@ class InspectCommand(Command):
self.cmd_opts.add_option(cmdoptions.list_path())
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
cmdoptions.check_list_path_option(options)
dists = get_environment(options.path).iter_installed_distributions(
local_only=options.local,
@ -62,8 +62,8 @@ class InspectCommand(Command):
print_json(data=output)
return SUCCESS
def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
res: Dict[str, Any] = {
def _dist_to_dict(self, dist: BaseDistribution) -> dict[str, Any]:
res: dict[str, Any] = {
"metadata": dist.metadata_dict,
"metadata_location": dist.info_location,
}

View File

@ -1,3 +1,5 @@
from __future__ import annotations
import errno
import json
import operator
@ -5,11 +7,19 @@ import os
import shutil
import site
from optparse import SUPPRESS_HELP, Values
from typing import List, Optional
from pathlib import Path
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.requests.exceptions import InvalidProxyURL
from pip._vendor.rich import print_json
# Eagerly import self_outdated_check to avoid crashes. Otherwise,
# this module would be imported *after* pip was replaced, resulting
# in crashes if the new self_outdated_check module was incompatible
# with the rest of pip that's already imported, or allowing a
# wheel to execute arbitrary code on install by replacing
# self_outdated_check.
import pip._internal.self_outdated_check # noqa: F401
from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
@ -18,7 +28,11 @@ from pip._internal.cli.req_command import (
with_cleanup,
)
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, InstallationError
from pip._internal.exceptions import (
CommandError,
InstallationError,
InstallWheelBuildError,
)
from pip._internal.locations import get_scheme
from pip._internal.metadata import get_environment
from pip._internal.models.installation_report import InstallationReport
@ -27,7 +41,6 @@ from pip._internal.operations.check import ConflictDetails, check_install_confli
from pip._internal.req import install_given_reqs
from pip._internal.req.req_install import (
InstallRequirement,
check_legacy_setup_py_options,
)
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.filesystem import test_writable_dir
@ -45,7 +58,7 @@ from pip._internal.utils.virtualenv import (
running_under_virtualenv,
virtualenv_no_global,
)
from pip._internal.wheel_builder import build, should_build_for_install_command
from pip._internal.wheel_builder import build
logger = getLogger(__name__)
@ -73,6 +86,7 @@ class InstallCommand(RequirementCommand):
def add_options(self) -> None:
self.cmd_opts.add_option(cmdoptions.requirements())
self.cmd_opts.add_option(cmdoptions.constraints())
self.cmd_opts.add_option(cmdoptions.build_constraints())
self.cmd_opts.add_option(cmdoptions.no_deps())
self.cmd_opts.add_option(cmdoptions.pre())
@ -196,12 +210,10 @@ class InstallCommand(RequirementCommand):
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
self.cmd_opts.add_option(cmdoptions.use_pep517())
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
self.cmd_opts.add_option(cmdoptions.check_build_deps())
self.cmd_opts.add_option(cmdoptions.override_externally_managed())
self.cmd_opts.add_option(cmdoptions.config_settings())
self.cmd_opts.add_option(cmdoptions.global_options())
self.cmd_opts.add_option(
"--compile",
@ -264,7 +276,7 @@ class InstallCommand(RequirementCommand):
)
@with_cleanup
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
if options.use_user_site and options.target_dir is not None:
raise CommandError("Can not combine '--user' and '--target'")
@ -289,6 +301,7 @@ class InstallCommand(RequirementCommand):
if options.upgrade:
upgrade_strategy = options.upgrade_strategy
cmdoptions.check_build_constraints(options)
cmdoptions.check_dist_restriction(options, check_target=True)
logger.verbose("Using %s", get_pip_version())
@ -300,8 +313,8 @@ class InstallCommand(RequirementCommand):
isolated_mode=options.isolated_mode,
)
target_temp_dir: Optional[TempDirectory] = None
target_temp_dir_path: Optional[str] = None
target_temp_dir: TempDirectory | None = None
target_temp_dir_path: str | None = None
if options.target_dir:
options.ignore_installed = True
options.target_dir = os.path.abspath(options.target_dir)
@ -320,8 +333,6 @@ class InstallCommand(RequirementCommand):
target_temp_dir_path = target_temp_dir.path
self.enter_context(target_temp_dir)
global_options = options.global_options or []
session = self.get_default_session(options)
target_python = make_target_python(options)
@ -341,7 +352,6 @@ class InstallCommand(RequirementCommand):
try:
reqs = self.get_requirements(args, options, finder, session)
check_legacy_setup_py_options(options, reqs)
wheel_cache = WheelCache(options.cache_dir)
@ -370,7 +380,6 @@ class InstallCommand(RequirementCommand):
ignore_requires_python=options.ignore_requires_python,
force_reinstall=options.force_reinstall,
upgrade_strategy=upgrade_strategy,
use_pep517=options.use_pep517,
py_version_info=options.python_version,
)
@ -400,6 +409,13 @@ class InstallCommand(RequirementCommand):
)
return SUCCESS
# If there is any more preparation to do for the actual installation, do
# so now. This includes actually downloading the files in the case that
# we have been using PEP-658 metadata so far.
preparer.prepare_linked_requirements_more(
requirement_set.requirements.values()
)
try:
pip_req = requirement_set.get_requirement("pip")
except KeyError:
@ -408,40 +424,25 @@ class InstallCommand(RequirementCommand):
# If we're not replacing an already installed pip,
# we're not modifying it.
modifying_pip = pip_req.satisfied_by is None
if modifying_pip:
# Eagerly import this module to avoid crashes. Otherwise, this
# module would be imported *after* pip was replaced, resulting in
# crashes if the new self_outdated_check module was incompatible
# with the rest of pip that's already imported.
import pip._internal.self_outdated_check # noqa: F401
protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)
reqs_to_build = [
r
for r in requirement_set.requirements.values()
if should_build_for_install_command(r)
r for r in requirement_set.requirements_to_install if not r.is_wheel
]
_, build_failures = build(
reqs_to_build,
wheel_cache=wheel_cache,
verify=True,
build_options=[],
global_options=global_options,
)
if build_failures:
raise InstallationError(
"ERROR: Failed to build installable wheels for some "
"pyproject.toml based projects ({})".format(
", ".join(r.name for r in build_failures) # type: ignore
)
)
raise InstallWheelBuildError(build_failures)
to_install = resolver.get_installation_order(requirement_set)
# Check for conflicts in the package set we're installing.
conflicts: Optional[ConflictDetails] = None
conflicts: ConflictDetails | None = None
should_warn_about_conflicts = (
not options.ignore_dependencies and options.warn_about_conflicts
)
@ -456,13 +457,13 @@ class InstallCommand(RequirementCommand):
installed = install_given_reqs(
to_install,
global_options,
root=options.root_path,
home=target_temp_dir_path,
prefix=options.prefix_path,
warn_script_location=warn_script_location,
use_user_site=options.use_user_site,
pycompile=options.compile,
progress_bar=options.progress_bar,
)
lib_locations = get_lib_location_guesses(
@ -578,8 +579,8 @@ class InstallCommand(RequirementCommand):
shutil.move(os.path.join(lib_dir, item), target_item_dir)
def _determine_conflicts(
self, to_install: List[InstallRequirement]
) -> Optional[ConflictDetails]:
self, to_install: list[InstallRequirement]
) -> ConflictDetails | None:
try:
return check_install_conflicts(to_install)
except Exception:
@ -596,7 +597,7 @@ class InstallCommand(RequirementCommand):
if not missing and not conflicting:
return
parts: List[str] = []
parts: list[str] = []
if resolver_variant == "legacy":
parts.append(
"pip's legacy dependency resolver does not consider dependency "
@ -642,11 +643,11 @@ class InstallCommand(RequirementCommand):
def get_lib_location_guesses(
user: bool = False,
home: Optional[str] = None,
root: Optional[str] = None,
home: str | None = None,
root: str | None = None,
isolated: bool = False,
prefix: Optional[str] = None,
) -> List[str]:
prefix: str | None = None,
) -> list[str]:
scheme = get_scheme(
"",
user=user,
@ -658,7 +659,7 @@ def get_lib_location_guesses(
return [scheme.purelib, scheme.platlib]
def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
def site_packages_writable(root: str | None, isolated: bool) -> bool:
return all(
test_writable_dir(d)
for d in set(get_lib_location_guesses(root=root, isolated=isolated))
@ -666,10 +667,10 @@ def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
def decide_user_install(
use_user_site: Optional[bool],
prefix_path: Optional[str] = None,
target_dir: Optional[str] = None,
root_path: Optional[str] = None,
use_user_site: bool | None,
prefix_path: str | None = None,
target_dir: str | None = None,
root_path: str | None = None,
isolated_mode: bool = False,
) -> bool:
"""Determine whether to do a user install based on the input options.
@ -686,6 +687,7 @@ def decide_user_install(
logger.debug("Non-user install by explicit request")
return False
# If we have been asked for a user install explicitly, check compatibility.
if use_user_site:
if prefix_path:
raise CommandError(
@ -697,6 +699,13 @@ def decide_user_install(
"Can not perform a '--user' install. User site-packages "
"are not visible in this virtualenv."
)
# Catch all remaining cases which honour the site.ENABLE_USER_SITE
# value, such as a plain Python installation (e.g. no virtualenv).
if not site.ENABLE_USER_SITE:
raise InstallationError(
"Can not perform a '--user' install. User site-packages "
"are disabled for this Python."
)
logger.debug("User install by explicit request")
return True
@ -764,14 +773,26 @@ def create_os_error_message(
parts.append(permissions_part)
parts.append(".\n")
# Suggest the user to enable Long Paths if path length is
# more than 260
if (
WINDOWS
and error.errno == errno.ENOENT
and error.filename
and len(error.filename) > 260
):
# Suggest to check "pip config debug" in case of invalid proxy
if type(error) is InvalidProxyURL:
parts.append(
'Consider checking your local proxy configuration with "pip config debug"'
)
parts.append(".\n")
# On Windows, errors like EINVAL or ENOENT may occur
# if a file or folder name exceeds 255 characters,
# or if the full path exceeds 260 characters and long path support isn't enabled.
# This condition checks for such cases and adds a hint to the error output.
if WINDOWS and error.errno in (errno.EINVAL, errno.ENOENT) and error.filename:
if any(len(part) > 255 for part in Path(error.filename).parts):
parts.append(
"HINT: This error might be caused by a file or folder name exceeding "
"255 characters, which is a Windows limitation even if long paths "
"are enabled.\n "
)
if len(error.filename) > 260:
parts.append(
"HINT: This error might have occurred since "
"this system does not have Windows Long Path "
@ -779,5 +800,4 @@ def create_os_error_message(
"how to enable this at "
"https://pip.pypa.io/warnings/enable-long-paths\n"
)
return "".join(parts).strip() + "\n"

View File

@ -1,10 +1,14 @@
from __future__ import annotations
import json
import logging
from collections.abc import Generator, Sequence
from email.parser import Parser
from optparse import Values
from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast
from typing import TYPE_CHECKING, cast
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import InvalidVersion, Version
from pip._internal.cli import cmdoptions
from pip._internal.cli.index_command import IndexGroupCommand
@ -125,7 +129,7 @@ class ListCommand(IndexGroupCommand):
"--include-editable",
action="store_true",
dest="include_editable",
help="Include editable package from output.",
help="Include editable package in output.",
default=True,
)
self.cmd_opts.add_option(cmdoptions.list_exclude())
@ -139,8 +143,8 @@ class ListCommand(IndexGroupCommand):
super().handle_pip_version_check(options)
def _build_package_finder(
self, options: Values, session: "PipSession"
) -> "PackageFinder":
self, options: Values, session: PipSession
) -> PackageFinder:
"""
Create a package finder appropriate to this list command.
"""
@ -161,7 +165,7 @@ class ListCommand(IndexGroupCommand):
selection_prefs=selection_prefs,
)
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
if options.outdated and options.uptodate:
raise CommandError("Options --outdated and --uptodate cannot be combined.")
@ -176,7 +180,7 @@ class ListCommand(IndexGroupCommand):
if options.excludes:
skip.update(canonicalize_name(n) for n in options.excludes)
packages: "_ProcessedDists" = [
packages: _ProcessedDists = [
cast("_DistWithLatestInfo", d)
for d in get_environment(options.path).iter_installed_distributions(
local_only=options.local,
@ -203,8 +207,8 @@ class ListCommand(IndexGroupCommand):
return SUCCESS
def get_outdated(
self, packages: "_ProcessedDists", options: Values
) -> "_ProcessedDists":
self, packages: _ProcessedDists, options: Values
) -> _ProcessedDists:
return [
dist
for dist in self.iter_packages_latest_infos(packages, options)
@ -212,8 +216,8 @@ class ListCommand(IndexGroupCommand):
]
def get_uptodate(
self, packages: "_ProcessedDists", options: Values
) -> "_ProcessedDists":
self, packages: _ProcessedDists, options: Values
) -> _ProcessedDists:
return [
dist
for dist in self.iter_packages_latest_infos(packages, options)
@ -221,8 +225,8 @@ class ListCommand(IndexGroupCommand):
]
def get_not_required(
self, packages: "_ProcessedDists", options: Values
) -> "_ProcessedDists":
self, packages: _ProcessedDists, options: Values
) -> _ProcessedDists:
dep_keys = {
canonicalize_name(dep.name)
for dist in packages
@ -235,14 +239,14 @@ class ListCommand(IndexGroupCommand):
return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})
def iter_packages_latest_infos(
self, packages: "_ProcessedDists", options: Values
) -> Generator["_DistWithLatestInfo", None, None]:
self, packages: _ProcessedDists, options: Values
) -> Generator[_DistWithLatestInfo, None, None]:
with self._build_session(options) as session:
finder = self._build_package_finder(options, session)
def latest_info(
dist: "_DistWithLatestInfo",
) -> Optional["_DistWithLatestInfo"]:
dist: _DistWithLatestInfo,
) -> _DistWithLatestInfo | None:
all_candidates = finder.find_all_candidates(dist.canonical_name)
if not options.pre:
# Remove prereleases
@ -273,7 +277,7 @@ class ListCommand(IndexGroupCommand):
yield dist
def output_package_listing(
self, packages: "_ProcessedDists", options: Values
self, packages: _ProcessedDists, options: Values
) -> None:
packages = sorted(
packages,
@ -284,17 +288,19 @@ class ListCommand(IndexGroupCommand):
self.output_package_listing_columns(data, header)
elif options.list_format == "freeze":
for dist in packages:
try:
req_string = f"{dist.raw_name}=={dist.version}"
except InvalidVersion:
req_string = f"{dist.raw_name}==={dist.raw_version}"
if options.verbose >= 1:
write_output(
"%s==%s (%s)", dist.raw_name, dist.version, dist.location
)
write_output("%s (%s)", req_string, dist.location)
else:
write_output("%s==%s", dist.raw_name, dist.version)
write_output(req_string)
elif options.list_format == "json":
write_output(format_for_json(packages, options))
def output_package_listing_columns(
self, data: List[List[str]], header: List[str]
self, data: list[list[str]], header: list[str]
) -> None:
# insert the header first: we need to know the size of column names
if len(data) > 0:
@ -311,8 +317,8 @@ class ListCommand(IndexGroupCommand):
def format_for_columns(
pkgs: "_ProcessedDists", options: Values
) -> Tuple[List[List[str]], List[str]]:
pkgs: _ProcessedDists, options: Values
) -> tuple[list[list[str]], list[str]]:
"""
Convert the package data into something usable
by output_package_listing_columns.
@ -323,17 +329,29 @@ def format_for_columns(
if running_outdated:
header.extend(["Latest", "Type"])
has_editables = any(x.editable for x in pkgs)
if has_editables:
header.append("Editable project location")
def wheel_build_tag(dist: BaseDistribution) -> str | None:
try:
wheel_file = dist.read_text("WHEEL")
except FileNotFoundError:
return None
return Parser().parsestr(wheel_file).get("Build")
build_tags = [wheel_build_tag(p) for p in pkgs]
has_build_tags = any(build_tags)
if has_build_tags:
header.append("Build")
if options.verbose >= 1:
header.append("Location")
if options.verbose >= 1:
header.append("Installer")
has_editables = any(x.editable for x in pkgs)
if has_editables:
header.append("Editable project location")
data = []
for proj in pkgs:
for i, proj in enumerate(pkgs):
# if we're working on the 'outdated' list, separate out the
# latest_version and type
row = [proj.raw_name, proj.raw_version]
@ -342,6 +360,9 @@ def format_for_columns(
row.append(str(proj.latest_version))
row.append(proj.latest_filetype)
if has_build_tags:
row.append(build_tags[i] or "")
if has_editables:
row.append(proj.editable_project_location or "")
@ -355,12 +376,16 @@ def format_for_columns(
return data, header
def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
def format_for_json(packages: _ProcessedDists, options: Values) -> str:
data = []
for dist in packages:
try:
version = str(dist.version)
except InvalidVersion:
version = dist.raw_version
info = {
"name": dist.raw_name,
"version": str(dist.version),
"version": version,
}
if options.verbose >= 1:
info["location"] = dist.location or ""

View File

@ -1,3 +1,5 @@
from __future__ import annotations
import logging
import shutil
import sys
@ -5,7 +7,7 @@ import textwrap
import xmlrpc.client
from collections import OrderedDict
from optparse import Values
from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict
from typing import TypedDict
from pip._vendor.packaging.version import parse as parse_version
@ -14,17 +16,17 @@ from pip._internal.cli.req_command import SessionCommandMixin
from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
from pip._internal.exceptions import CommandError
from pip._internal.metadata import get_default_environment
from pip._internal.metadata.base import BaseDistribution
from pip._internal.models.index import PyPI
from pip._internal.network.xmlrpc import PipXmlrpcTransport
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import write_output
if TYPE_CHECKING:
class TransformedHit(TypedDict):
class TransformedHit(TypedDict):
name: str
summary: str
versions: List[str]
versions: list[str]
logger = logging.getLogger(__name__)
@ -49,7 +51,7 @@ class SearchCommand(Command, SessionCommandMixin):
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
if not args:
raise CommandError("Missing required argument (search query).")
query = args
@ -65,7 +67,7 @@ class SearchCommand(Command, SessionCommandMixin):
return SUCCESS
return NO_MATCHES_FOUND
def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
def search(self, query: list[str], options: Values) -> list[dict[str, str]]:
index_url = options.index
session = self.get_default_session(options)
@ -83,13 +85,13 @@ class SearchCommand(Command, SessionCommandMixin):
return hits
def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
def transform_hits(hits: list[dict[str, str]]) -> list[TransformedHit]:
"""
The list from pypi is really a list of versions. We want a list of
packages with the list of versions stored inline. This converts the
list from pypi into one we can use.
"""
packages: Dict[str, "TransformedHit"] = OrderedDict()
packages: dict[str, TransformedHit] = OrderedDict()
for hit in hits:
name = hit["name"]
summary = hit["summary"]
@ -111,9 +113,7 @@ def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
return list(packages.values())
def print_dist_installation_info(name: str, latest: str) -> None:
env = get_default_environment()
dist = env.get_distribution(name)
def print_dist_installation_info(latest: str, dist: BaseDistribution | None) -> None:
if dist is not None:
with indent_log():
if dist.version == latest:
@ -130,10 +130,15 @@ def print_dist_installation_info(name: str, latest: str) -> None:
write_output("LATEST: %s", latest)
def get_installed_distribution(name: str) -> BaseDistribution | None:
env = get_default_environment()
return env.get_distribution(name)
def print_results(
hits: List["TransformedHit"],
name_column_width: Optional[int] = None,
terminal_width: Optional[int] = None,
hits: list[TransformedHit],
name_column_width: int | None = None,
terminal_width: int | None = None,
) -> None:
if not hits:
return
@ -163,10 +168,11 @@ def print_results(
line = f"{name_latest:{name_column_width}} - {summary}"
try:
write_output(line)
print_dist_installation_info(name, latest)
dist = get_installed_distribution(name)
print_dist_installation_info(latest, dist)
except UnicodeEncodeError:
pass
def highest_version(versions: List[str]) -> str:
def highest_version(versions: list[str]) -> str:
return max(versions, key=parse_version)

View File

@ -1,6 +1,10 @@
from __future__ import annotations
import logging
import string
from collections.abc import Generator, Iterable, Iterator
from optparse import Values
from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
from typing import NamedTuple
from pip._vendor.packaging.requirements import InvalidRequirement
from pip._vendor.packaging.utils import canonicalize_name
@ -13,6 +17,13 @@ from pip._internal.utils.misc import write_output
logger = logging.getLogger(__name__)
def normalize_project_url_label(label: str) -> str:
# This logic is from PEP 753 (Well-known Project URLs in Metadata).
chars_to_remove = string.punctuation + string.whitespace
removal_map = str.maketrans("", "", chars_to_remove)
return label.translate(removal_map).lower()
class ShowCommand(Command):
"""
Show information about one or more installed packages.
@ -36,7 +47,7 @@ class ShowCommand(Command):
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
def run(self, options: Values, args: list[str]) -> int:
if not args:
logger.warning("ERROR: Please provide a package name or names.")
return ERROR
@ -54,23 +65,24 @@ class _PackageInfo(NamedTuple):
name: str
version: str
location: str
editable_project_location: Optional[str]
requires: List[str]
required_by: List[str]
editable_project_location: str | None
requires: list[str]
required_by: list[str]
installer: str
metadata_version: str
classifiers: List[str]
classifiers: list[str]
summary: str
homepage: str
project_urls: List[str]
project_urls: list[str]
author: str
author_email: str
license: str
entry_points: List[str]
files: Optional[List[str]]
license_expression: str
entry_points: list[str]
files: list[str] | None
def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
def search_packages_info(query: list[str]) -> Generator[_PackageInfo, None, None]:
"""
Gather details from installed distributions. Print distribution name,
version, location, and installed files. Installed files requires a
@ -123,7 +135,7 @@ def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None
files_iter = dist.iter_declared_entries()
if files_iter is None:
files: Optional[List[str]] = None
files: list[str] | None = None
else:
files = sorted(files_iter)
@ -134,13 +146,9 @@ def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None
if not homepage:
# It's common that there is a "homepage" Project-URL, but Home-page
# remains unset (especially as PEP 621 doesn't surface the field).
#
# This logic was taken from PyPI's codebase.
for url in project_urls:
url_label, url = url.split(",", maxsplit=1)
normalized_label = (
url_label.casefold().replace("-", "").replace("_", "").strip()
)
normalized_label = normalize_project_url_label(url_label)
if normalized_label == "homepage":
homepage = url.strip()
break
@ -161,6 +169,7 @@ def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None
author=metadata.get("Author", ""),
author_email=metadata.get("Author-email", ""),
license=metadata.get("License", ""),
license_expression=metadata.get("License-Expression", ""),
entry_points=entry_points,
files=files,
)
@ -180,12 +189,17 @@ def print_results(
if i > 0:
write_output("---")
metadata_version_tuple = tuple(map(int, dist.metadata_version.split(".")))
write_output("Name: %s", dist.name)
write_output("Version: %s", dist.version)
write_output("Summary: %s", dist.summary)
write_output("Home-page: %s", dist.homepage)
write_output("Author: %s", dist.author)
write_output("Author-email: %s", dist.author_email)
if metadata_version_tuple >= (2, 4) and dist.license_expression:
write_output("License-Expression: %s", dist.license_expression)
else:
write_output("License: %s", dist.license)
write_output("Location: %s", dist.location)
if dist.editable_project_location is not None:

Some files were not shown because too many files have changed in this diff Show More