fix:fastapi

gongwenxin 2025-08-27 16:29:44 +08:00
parent 96b8739395
commit a0ee9aa312
4 changed files with 114 additions and 586 deletions

View File

@@ -282,56 +282,40 @@ cd "$TEMP_BUILD_DIR"
 if [[ "$SELECTED_SERVICE_ARCH" == "dual" ]]; then
     # Dual-service architecture - use supervisor to manage both services
     cat > "Dockerfile" << 'EOF'
-# Multi-stage build - build stage
-FROM python:3.11-alpine AS builder
-# Install build dependencies
-RUN apk add --no-cache \
+# Use a stable Python base image
+FROM python:3.11-alpine
+# Install system dependencies
+RUN apk update && apk add --no-cache \
     gcc \
     musl-dev \
     libffi-dev \
     openssl-dev \
-    cargo \
-    rust
-# Set working directory
-WORKDIR /app
-# Copy dependency file
-COPY requirements.txt .
-# Create virtual environment and install dependencies
-RUN python -m venv /opt/venv
-ENV PATH="/opt/venv/bin:$PATH"
-RUN pip install --no-cache-dir --upgrade pip setuptools wheel
-RUN pip install --no-cache-dir -r requirements.txt
-# Runtime stage
-FROM python:3.11-alpine AS runtime
-# Install runtime dependencies
-RUN apk add --no-cache \
+    python3-dev \
+    build-base \
+    linux-headers \
     supervisor \
     curl \
     bash \
-    tzdata
-# Copy the virtual environment from the build stage
-COPY --from=builder /opt/venv /opt/venv
-ENV PATH="/opt/venv/bin:$PATH"
+    tzdata && \
+    rm -rf /var/cache/apk/*
 # Set working directory
 WORKDIR /app
+# Copy dependency file and install Python packages
+COPY requirements.txt .
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
+    pip install --no-cache-dir -r requirements.txt
 # Copy application code
 COPY . .
-# Create supervisor configuration
-RUN mkdir -p /etc/supervisor/conf.d
-COPY supervisord.conf /etc/supervisor/conf.d/
-# Create required directories
-RUN mkdir -p /var/log/supervisor /app/logs /app/test_reports /app/uploads
+# Create supervisor configuration and runtime directories
+RUN mkdir -p /etc/supervisor/conf.d /var/log/supervisor /app/logs /app/test_reports /app/uploads
+# Copy supervisor configuration
+COPY supervisord.conf /etc/supervisor/conf.d/
 # Create non-root user
 RUN addgroup -g 1000 appuser && \
@@ -346,7 +330,7 @@ ENV FLASK_ENV=production
 ENV PYTHONUNBUFFERED=1
 # Health check
-HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
     CMD curl -f http://localhost:5050/ || exit 1
 # Expose ports
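For reference, the health check above only needs the root route on port 5050 to answer with a 2xx status. A minimal sketch of such a route, assuming api_server.py is a Flask app (the app and route body here are illustrative, not taken from this diff):

from flask import Flask, jsonify

app = Flask(__name__)

@app.route("/")
def index():
    # curl -f only fails on HTTP errors (>= 400), so a 200 here marks the container healthy
    return jsonify({"status": "ok"}), 200

if __name__ == "__main__":
    # Port 5050 matches the EXPOSE/HEALTHCHECK above
    app.run(host="0.0.0.0", port=5050)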
@@ -409,50 +393,38 @@ EOF
 elif [[ "$SELECTED_SERVICE_ARCH" == "fastapi" ]]; then
     cat > "Dockerfile" << 'EOF'
-# Multi-stage build - build stage
-FROM python:3.11-alpine AS builder
-# Install build dependencies
-RUN apk add --no-cache \
+# Use a stable Python base image
+FROM python:3.11-alpine
+# Install system dependencies
+RUN apk update && apk add --no-cache \
     gcc \
     musl-dev \
     libffi-dev \
     openssl-dev \
-    cargo \
-    rust
-# Set working directory
-WORKDIR /app
-# Copy dependency file
-COPY requirements.txt .
-# Create virtual environment and install dependencies
-RUN python -m venv /opt/venv
-ENV PATH="/opt/venv/bin:$PATH"
-RUN pip install --no-cache-dir --upgrade pip setuptools wheel
-RUN pip install --no-cache-dir -r requirements.txt
-RUN pip install --no-cache-dir fastapi uvicorn[standard]
-# Runtime stage
-FROM python:3.11-alpine AS runtime
-# Install runtime dependencies
-RUN apk add --no-cache \
+    python3-dev \
+    build-base \
+    linux-headers \
     curl \
     bash \
-    tzdata
-# Copy the virtual environment from the build stage
-COPY --from=builder /opt/venv /opt/venv
-ENV PATH="/opt/venv/bin:$PATH"
+    tzdata && \
+    rm -rf /var/cache/apk/*
 # Set working directory
 WORKDIR /app
+# Copy dependency file and install Python packages
+COPY requirements.txt .
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
+    pip install --no-cache-dir -r requirements.txt && \
+    pip install --no-cache-dir fastapi uvicorn[standard]
 # Copy application code
 COPY . .
+# Create required directories
+RUN mkdir -p /app/logs /app/uploads /app/reports
 # Create non-root user
 RUN addgroup -g 1000 appuser && \
     adduser -D -u 1000 -G appuser appuser && \
@@ -460,8 +432,12 @@ RUN addgroup -g 1000 appuser && \
 USER appuser
+# Set environment variables
+ENV PYTHONPATH=/app
+ENV PYTHONUNBUFFERED=1
 # Health check
-HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
     CMD curl -f http://localhost:5051/health || exit 1
 # Expose port
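The corresponding /health route on port 5051 could look like the following minimal FastAPI sketch; only the path and port come from the HEALTHCHECK above, the handler body is an assumption:

import uvicorn
from fastapi import FastAPI

app = FastAPI()

@app.get("/health")
def health():
    # Any 2xx response satisfies the curl-based Docker HEALTHCHECK
    return {"status": "ok"}

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=5051)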
@@ -827,50 +803,38 @@ EOF
 else
     # Flask version of the Dockerfile
     cat > "Dockerfile" << 'EOF'
-# Multi-stage build - build stage
-FROM python:3.11-alpine AS builder
-# Install build dependencies
-RUN apk add --no-cache \
+# Use a stable Python base image
+FROM python:3.11-alpine
+# Install system dependencies
+RUN apk update && apk add --no-cache \
     gcc \
     musl-dev \
     libffi-dev \
     openssl-dev \
-    cargo \
-    rust
-# Set working directory
-WORKDIR /app
-# Copy dependency file
-COPY requirements.txt .
-# Create virtual environment and install dependencies
-RUN python -m venv /opt/venv
-ENV PATH="/opt/venv/bin:$PATH"
-RUN pip install --no-cache-dir --upgrade pip setuptools wheel
-RUN pip install --no-cache-dir -r requirements.txt
-RUN pip install --no-cache-dir flask gunicorn
-# Runtime stage
-FROM python:3.11-alpine AS runtime
-# Install runtime dependencies
-RUN apk add --no-cache \
+    python3-dev \
+    build-base \
+    linux-headers \
     curl \
     bash \
-    tzdata
-# Copy the virtual environment from the build stage
-COPY --from=builder /opt/venv /opt/venv
-ENV PATH="/opt/venv/bin:$PATH"
+    tzdata && \
+    rm -rf /var/cache/apk/*
 # Set working directory
 WORKDIR /app
+# Copy dependency file and install Python packages
+COPY requirements.txt .
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
+    pip install --no-cache-dir -r requirements.txt && \
+    pip install --no-cache-dir flask gunicorn
 # Copy application code
 COPY . .
+# Create required directories
+RUN mkdir -p /app/logs /app/uploads /app/reports
 # Create non-root user
 RUN addgroup -g 1000 appuser && \
     adduser -D -u 1000 -G appuser appuser && \
@@ -878,9 +842,14 @@ RUN addgroup -g 1000 appuser && \
 USER appuser
+# Set environment variables
+ENV PYTHONPATH=/app
+ENV FLASK_ENV=production
+ENV PYTHONUNBUFFERED=1
 # Health check
-HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
-    CMD curl -f http://localhost:5050/health || exit 1
+HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
+    CMD curl -f http://localhost:5050/ || exit 1
 # Expose port
 EXPOSE 5050

View File

@@ -87,46 +87,20 @@ class TestConfig(BaseModel):
     """Test configuration model"""
     # API definition source (choose one of three)
-    yapi: Optional[str] = Field(None, description="Path to the YAPI definition file", example="./api_spec.json")
-    swagger: Optional[str] = Field(None, description="Path to the Swagger/OpenAPI definition file", example="./openapi.yaml")
-    dms: Optional[str] = Field(None, description="Path to the domain mapping file for DMS service discovery", example="./assets/doc/dms/domain.json")
+    yapi: Optional[str] = Field(None, description="Path to the YAPI definition file", exclude=True)
+    swagger: Optional[str] = Field(None, description="Path to the Swagger/OpenAPI definition file", exclude=True)
+    dms: Optional[str] = Field("./assets/doc/dms/domain.json", description="Path to the domain mapping file for DMS service discovery", example="./assets/doc/dms/domain.json")
     # Basic configuration
-    base_url: str = Field(..., description="API base URL", example="https://api.example.com")
+    base_url: str = Field("https://www.dev.ideas.cnpc/", description="API base URL", example="https://www.dev.ideas.cnpc/")
     # Pagination configuration
-    page_size: int = Field(1000, description="DMS API page size, default 1000. Smaller values reduce memory usage", ge=1, le=10000)
+    page_size: int = Field(10, description="DMS API page size, default 10. Smaller values reduce memory usage", ge=1, le=10000)
     page_no: int = Field(1, description="Starting page number, beginning at 1. Can be used to resume or to skip earlier pages", ge=1)
-    fetch_all_pages: bool = Field(True, description="Whether to fetch all pages. True = fetch all data, False = fetch only the specified page")
+    fetch_all_pages: bool = Field(False, description="Whether to fetch all pages. True = fetch all data, False = fetch only the specified page")
     # Filter options
-    categories: Optional[List[str]] = Field(None, description="List of YAPI categories", example=["用户管理", "订单系统"])
-    tags: Optional[List[str]] = Field(None, description="List of Swagger tags", example=["user", "order"])
     strictness_level: str = Field("CRITICAL", description="Test strictness level", pattern="^(CRITICAL|HIGH|MEDIUM|LOW)$")
-    # SSL and security
-    ignore_ssl: bool = Field(False, description="Ignore SSL certificate verification (not recommended in production)")
-    # Output configuration
-    output: str = Field("./test_reports", description="Output directory for test reports")
-    generate_pdf: bool = Field(True, description="Whether to generate a PDF report")
-    # Custom tests
-    custom_test_cases_dir: Optional[str] = Field(None, description="Path to the custom test case directory")
-    stages_dir: Optional[str] = Field(None, description="Path to the custom test stage directory")
-    # LLM configuration
-    llm_api_key: Optional[str] = Field(None, description="LLM API key")
-    llm_base_url: Optional[str] = Field(None, description="LLM API base URL")
-    llm_model_name: Optional[str] = Field("gpt-3.5-turbo", description="LLM model name")
-    use_llm_for_request_body: bool = Field(False, description="Use the LLM to generate request bodies")
-    use_llm_for_path_params: bool = Field(False, description="Use the LLM to generate path parameters")
-    use_llm_for_query_params: bool = Field(False, description="Use the LLM to generate query parameters")
-    use_llm_for_headers: bool = Field(False, description="Use the LLM to generate request headers")
-    # Debug options
-    verbose: bool = Field(False, description="Enable verbose logging")
     @field_validator('base_url')
     @classmethod
     def validate_base_url(cls, v):
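With the trimmed model, every remaining field has a default, so an empty request body now yields a full configuration. A standalone Pydantic v2 sketch of that behavior (field names mirror the diff; the validator body is assumed, since the real one is not shown in this hunk):

from typing import Optional
from pydantic import BaseModel, Field, field_validator

class TestConfigSketch(BaseModel):
    dms: Optional[str] = Field("./assets/doc/dms/domain.json", exclude=True)  # excluded from dumps
    base_url: str = "https://www.dev.ideas.cnpc/"
    page_size: int = Field(10, ge=1, le=10000)
    fetch_all_pages: bool = False

    @field_validator("base_url")
    @classmethod
    def validate_base_url(cls, v: str) -> str:
        # Assumed check; the project's actual validator is defined in the original file
        if not v.startswith(("http://", "https://")):
            raise ValueError("base_url must start with http:// or https://")
        return v

cfg = TestConfigSketch()                  # all defaults apply
print(cfg.model_dump(exclude_none=True))  # 'dms' is dropped because of exclude=True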
@@ -239,7 +213,7 @@ def run_tests_logic(config: dict):
     output_directory = base_output_dir / timestamp
     output_directory.mkdir(parents=True, exist_ok=True)
     logger.info(f"Test reports will be saved to: {output_directory.resolve()}")
+    print(f"config{config}")
     # Initialize the orchestrator
     orchestrator = APITestOrchestrator(
         base_url=config['base_url'],
@@ -291,9 +265,7 @@ def run_tests_logic(config: dict):
     if test_summary and config.get('stages_dir') and parsed_spec:
         logger.info(f"Executing API test stages from directory: {config['stages_dir']}")
-        stage_summary = orchestrator.run_stages_from_spec(parsed_spec, config['stages_dir'])
-        if stage_summary:
-            test_summary.merge_stage_summary(stage_summary)
+        orchestrator.run_stages_from_spec(parsed_spec, test_summary)
 
     if test_summary:
         # Save main summary
@@ -344,29 +316,29 @@ def save_api_call_details_to_markdown(api_call_details: List[APICallDetail], out
         f.write("# API Call Details\n\n")
         for i, detail in enumerate(api_call_details, 1):
-            f.write(f"## {i}. {detail.endpoint_name}\n\n")
-            f.write(f"**Request URL**: `{detail.request_url}`\n\n")
-            f.write(f"**Request method**: `{detail.request_method}`\n\n")
+            f.write(f" {i}. {detail.endpoint_name}\n\n")
+            f.write(f"Request URL: `{detail.request_url}`\n\n")
+            f.write(f"Request method: `{detail.request_method}`\n\n")
             if detail.request_headers:
-                f.write("**Request headers**:\n```json\n")
+                f.write("Request headers:\n```json\n")
                 f.write(json.dumps(detail.request_headers, indent=2, ensure_ascii=False))
                 f.write("\n```\n\n")
             if detail.request_body:
-                f.write("**Request body**:\n```json\n")
+                f.write("Request body:\n```json\n")
                 f.write(json.dumps(detail.request_body, indent=2, ensure_ascii=False))
                 f.write("\n```\n\n")
-            f.write(f"**Response status code**: `{detail.response_status_code}`\n\n")
+            f.write(f"Response status code: `{detail.response_status_code}`\n\n")
             if detail.response_headers:
-                f.write("**Response headers**:\n```json\n")
+                f.write("Response headers:\n```json\n")
                 f.write(json.dumps(detail.response_headers, indent=2, ensure_ascii=False))
                 f.write("\n```\n\n")
             if detail.response_body:
-                f.write("**Response body**:\n```json\n")
+                f.write("Response body:\n```json\n")
                 f.write(json.dumps(detail.response_body, indent=2, ensure_ascii=False))
                 f.write("\n```\n\n")
@@ -383,16 +355,16 @@ def save_api_call_details_to_markdown(api_call_details: List[APICallDetail], out
 Main endpoint for executing API compliance tests.
 Supports three API definition sources:
-- **YAPI**: based on a YAPI definition file
-- **Swagger/OpenAPI**: based on an OpenAPI specification file
-- **DMS**: dynamically discovers the DMS service's APIs
+- YAPI: based on a YAPI definition file
+- Swagger/OpenAPI: based on an OpenAPI specification file
+- DMS: dynamically discovers the DMS service's APIs
-### Pagination support
+Pagination support
 For DMS tests, the API list can be fetched page by page to avoid memory overflow:
 - `page_size`: number of APIs fetched per page, default 1000
 - detailed pagination statistics are returned
-### LLM integration
+LLM integration
 Optionally use a large language model to generate test data:
 - intelligently generates request bodies, path parameters, query parameters, etc.
 - improves test coverage and data diversity
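A hedged client-side sketch of calling the endpoint described above with only the fields that remain on TestConfig; the route path and port below are illustrative guesses, since neither is visible in this diff:

import requests

payload = {
    "dms": "./assets/doc/dms/domain.json",
    "base_url": "https://www.dev.ideas.cnpc/",
    "page_size": 10,
    "fetch_all_pages": False,
    "strictness_level": "CRITICAL",
}
# "/run-tests" and port 5051 are assumptions, not taken from this commit
resp = requests.post("http://localhost:5051/run-tests", json=payload, timeout=600)
resp.raise_for_status()
print(resp.json())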
@@ -407,17 +379,36 @@ async def run_api_tests(config: TestConfig):
     """
     Execute API compliance tests.
-    - **config**: test configuration, including the API definition source, test parameters, etc.
-    - **returns**: test results, including summary information and pagination info where applicable
+    - config: test configuration, including the API definition source, test parameters, etc.
+    - returns: test results, including summary information and pagination info where applicable
     """
     try:
         logger.info(f"Starting test run with configuration: {config.model_dump()}")
         # Convert Pydantic model to dict for compatibility
         config_dict = config.model_dump(exclude_none=True)
-        # Replace underscores with hyphens for compatibility with original code
-        config_dict = {k.replace('_', '-'): v for k, v in config_dict.items()}
+        # Add hidden parameters with default values
+        hidden_defaults = {
+            "categories": [],
+            "tags": [],
+            "ignore_ssl": True,
+            "output": "./test_reports",
+            "generate_pdf": True,
+            "custom_test_cases_dir": "./custom_testcases",
+            "stages_dir": "./custom_stages",
+            "llm_api_key": "sk-lbGrsUPL1iby86h554FaE536C343435dAa9bA65967A840B2",
+            "llm_base_url": "https://aiproxy.petrotech.cnpc/v1",
+            "llm_model_name": "deepseek-v3",
+            "use_llm_for_request_body": False,
+            "use_llm_for_path_params": False,
+            "use_llm_for_query_params": False,
+            "use_llm_for_headers": False,
+            "verbose": False
+        }
+        # Merge hidden defaults with config
+        config_dict.update(hidden_defaults)
 
         result = run_tests_logic(config_dict)
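A note on the merge direction used above: dict.update() lets the right-hand dict win, so hidden_defaults would override any overlapping key sent by the client. Today there is no overlap (those fields were just removed from TestConfig), but the distinction matters if any of them are re-exposed. A standalone check:

request_values = {"output": "./my_reports"}
hidden_defaults = {"output": "./test_reports", "generate_pdf": True}

merged = dict(request_values)
merged.update(hidden_defaults)
print(merged["output"])  # './test_reports' - the hidden default wins

merged_keeping_request = {**hidden_defaults, **request_values}
print(merged_keeping_request["output"])  # './my_reports' - the request value wins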
@@ -456,8 +447,8 @@ async def download_report(report_id: str, file_type: str = "summary.json"):
     """
     Download a test report file.
-    - **report_id**: report ID, usually a timestamp
-    - **file_type**: file type; allowed values: summary.json, api_call_details.md
+    - report_id: report ID, usually a timestamp
+    - file_type: file type; allowed values: summary.json, api_call_details.md
     """
     try:
         report_dir = Path("./test_reports") / report_id
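For context, a download endpoint like the one documented above is typically a thin wrapper over FileResponse. This is a generic FastAPI sketch, not the project's actual implementation (whose route path is not shown here):

from pathlib import Path

from fastapi import FastAPI, HTTPException
from fastapi.responses import FileResponse

app = FastAPI()

@app.get("/reports/{report_id}/download")  # illustrative path, not taken from this diff
async def download_report(report_id: str, file_type: str = "summary.json"):
    report_path = Path("./test_reports") / report_id / file_type
    if not report_path.is_file():
        raise HTTPException(status_code=404, detail="Report file not found")
    return FileResponse(report_path, filename=report_path.name)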

View File

@@ -1,179 +0,0 @@
#!/bin/bash
# Test the directory creation and copy logic
set -e
echo "=== Testing directory creation and copy logic ==="
# Mock variables
SELECTED_SERVICE_ARCH="dual"
TARGET_PLATFORM="linux/arm64"
MULTI_PLATFORM=false
# Update the export directory name to include platform information
if [[ "$MULTI_PLATFORM" == "true" ]]; then
EXPORT_DIR="dms-compliance-${SELECTED_SERVICE_ARCH}-multiplatform-$(date +%Y%m%d-%H%M%S)"
else
platform_suffix=$(echo "$TARGET_PLATFORM" | sed 's/linux\///g' | sed 's/\//-/g')
EXPORT_DIR="dms-compliance-${SELECTED_SERVICE_ARCH}-${platform_suffix}-$(date +%Y%m%d-%H%M%S)"
fi
ARCHIVE_NAME="$EXPORT_DIR.tar.gz"
echo "[信息] 最终输出目录: $EXPORT_DIR"
# 创建最终导出目录
rm -rf "$EXPORT_DIR"
mkdir -p "$EXPORT_DIR"
echo "[步骤 1/4] 复制项目文件..."
# 创建临时构建目录
TEMP_BUILD_DIR=$(mktemp -d)
trap "rm -rf $TEMP_BUILD_DIR" EXIT
# 复制核心目录(排除缓存和临时文件)
echo "[信息] 复制核心目录..."
mkdir -p "$TEMP_BUILD_DIR"/{ddms_compliance_suite,custom_stages,custom_testcases,templates,static,assets}
# 创建测试文件
echo "# Test content" > "$TEMP_BUILD_DIR/requirements.txt"
echo "# Test API server" > "$TEMP_BUILD_DIR/api_server.py"
echo "# Test history viewer" > "$TEMP_BUILD_DIR/history_viewer.py"
echo "# Test ddms suite" > "$TEMP_BUILD_DIR/ddms_compliance_suite/test.py"
echo "[步骤 2/4] 创建 Dockerfile..."
cd "$TEMP_BUILD_DIR"
# 创建测试Dockerfile
cat > "Dockerfile" << 'EOF'
FROM python:3.11-alpine
WORKDIR /app
COPY . .
EXPOSE 5050 5051
CMD ["echo", "Test dockerfile"]
EOF
# Create supervisor configuration
cat > "supervisord.conf" << 'EOF'
[supervisord]
nodaemon=true
[program:api_server]
command=python api_server.py
[program:history_viewer]
command=python history_viewer.py
EOF
cd ..
echo "[步骤 3/4] 复制构建文件..."
# 确保目标目录存在
mkdir -p "$EXPORT_DIR"
cp -r "$TEMP_BUILD_DIR"/* "$EXPORT_DIR/"
echo "[步骤 4/4] 创建配置文件..."
# 创建Docker Compose文件
cat > "$EXPORT_DIR/docker-compose.yml" << EOF
version: '3.8'
services:
dms-compliance:
build:
context: .
dockerfile: Dockerfile
platforms:
- $TARGET_PLATFORM
image: compliance-dms-multiplatform:latest
container_name: dms-compliance-tool
ports:
- "5050:5050" # API服务器端口
- "5051:5051" # 历史查看器端口
environment:
- PYTHONPATH=/app
- TZ=Asia/Shanghai
- FLASK_ENV=production
- PYTHONUNBUFFERED=1
volumes:
- ./uploads:/app/uploads
- ./logs:/app/logs
- ./test_reports:/app/test_reports
- ./config:/app/config:ro
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5050/"]
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
networks:
- dms-network
networks:
dms-network:
driver: bridge
volumes:
uploads:
logs:
test_reports:
config:
EOF
# Create the startup script
cat > "$EXPORT_DIR/start.sh" << 'EOF'
#!/bin/bash
echo "=== DMS Compliance Test Tool startup script ==="
echo "This is a test script"
EOF
chmod +x "$EXPORT_DIR/start.sh"
# Create the README
cat > "$EXPORT_DIR/README.md" << EOF
# DMS Compliance Test Tool - Test Deployment Package
## System Information
- **Architecture**: Dual-service architecture - API server (5050) + history viewer (5051)
- **Ports**: 5050,5051
- **Target platform**: ARM64 (aarch64) - Apple M1/M2, 64-bit ARM
- **Build time**: $(date '+%Y-%m-%d %H:%M:%S')
## Test Passed
The directory creation and file copy logic works as expected.
EOF
# Verify the file structure
echo ""
echo "=== Verifying file structure ==="
echo "Export directory contents:"
ls -la "$EXPORT_DIR"
echo ""
echo "Core file check:"
for file in Dockerfile docker-compose.yml start.sh README.md requirements.txt api_server.py history_viewer.py supervisord.conf; do
if [[ -f "$EXPORT_DIR/$file" ]]; then
echo "$file"
else
echo "$file (缺失)"
fi
done
# Create the archive
echo ""
echo "[INFO] Creating archive..."
tar -czf "$ARCHIVE_NAME" "$EXPORT_DIR"
# Clean up the temporary directory
rm -rf "$EXPORT_DIR"
# Show the results
echo ""
echo "=== Test complete ==="
echo "[SUCCESS] Test deployment package created: $ARCHIVE_NAME"
echo "[INFO] File size: $(du -h "$ARCHIVE_NAME" | cut -f1)"
echo ""
echo "Directory creation and copy logic test passed!"

View File

@@ -1,253 +0,0 @@
#!/bin/bash
# Test the basic functionality of the multi-platform script (without building Docker images)
set -e
# Configuration variables
EXPORT_DIR="dms-compliance-multiplatform-test-$(date +%Y%m%d-%H%M%S)"
IMAGE_NAME="compliance-dms-multiplatform"
ARCHIVE_NAME="$EXPORT_DIR.tar.gz"
# Supported platform list - functions instead of associative arrays, for compatibility with older bash
get_platform() {
case "$1" in
1) echo "linux/amd64" ;;
2) echo "linux/arm64" ;;
3) echo "linux/arm/v7" ;;
4) echo "linux/arm/v6" ;;
5) echo "linux/386" ;;
6) echo "linux/ppc64le" ;;
7) echo "linux/s390x" ;;
8) echo "linux/riscv64" ;;
*) echo "" ;;
esac
}
get_platform_name() {
case "$1" in
"linux/amd64") echo "AMD64 (x86_64) - Intel/AMD 64位" ;;
"linux/arm64") echo "ARM64 (aarch64) - Apple M1/M2, ARM 64位" ;;
"linux/arm/v7") echo "ARMv7 - 树莓派 3/4, ARM 32位" ;;
"linux/arm/v6") echo "ARMv6 - 树莓派 1/Zero, ARM 32位" ;;
"linux/386") echo "i386 - Intel/AMD 32位" ;;
"linux/ppc64le") echo "PowerPC 64位小端" ;;
"linux/s390x") echo "IBM System z" ;;
"linux/riscv64") echo "RISC-V 64位" ;;
*) echo "未知平台" ;;
esac
}
# Service architecture selection
get_service_arch() {
case "$1" in
1) echo "dual" ;;
2) echo "fastapi" ;;
3) echo "flask" ;;
*) echo "" ;;
esac
}
get_service_arch_name() {
case "$1" in
"dual") echo "双服务架构 - API服务器(5050) + 历史查看器(5051)" ;;
"fastapi") echo "FastAPI单服务 - 现代异步框架自动生成API文档(5051)" ;;
"flask") echo "Flask单服务 - 轻量级传统框架(5050)" ;;
*) echo "未知架构" ;;
esac
}
get_service_ports() {
case "$1" in
"dual") echo "5050,5051" ;;
"fastapi") echo "5051" ;;
"flask") echo "5050" ;;
*) echo "5050" ;;
esac
}
echo "=== DMS合规性测试工具 跨平台脚本测试 ==="
echo ""
# 检测当前平台
CURRENT_ARCH=$(uname -m)
case "$CURRENT_ARCH" in
x86_64|amd64) CURRENT_PLATFORM="linux/amd64" ;;
aarch64|arm64) CURRENT_PLATFORM="linux/arm64" ;;
armv7l) CURRENT_PLATFORM="linux/arm/v7" ;;
armv6l) CURRENT_PLATFORM="linux/arm/v6" ;;
i386|i686) CURRENT_PLATFORM="linux/386" ;;
*) CURRENT_PLATFORM="linux/amd64" ;;
esac
echo "[信息] 当前平台: $(get_platform_name "$CURRENT_PLATFORM")"
echo ""
# 选择服务架构
echo "请选择服务架构:"
echo " 1) $(get_service_arch_name "dual")"
echo " 2) $(get_service_arch_name "fastapi")"
echo " 3) $(get_service_arch_name "flask")"
echo ""
read -p "请输入选择 (1-3) [默认: 1]: " service_choice
service_choice=${service_choice:-1}
SELECTED_SERVICE_ARCH=$(get_service_arch "$service_choice")
if [[ -z "$SELECTED_SERVICE_ARCH" ]]; then
echo "[错误] 无效的服务架构选择"
exit 1
fi
SELECTED_PORTS=$(get_service_ports "$SELECTED_SERVICE_ARCH")
echo "[信息] 选择的架构: $(get_service_arch_name "$SELECTED_SERVICE_ARCH")"
echo "[信息] 服务端口: $SELECTED_PORTS"
echo ""
# 选择目标平台
echo "请选择目标平台架构:"
for key in 1 2 3 4 5 6 7 8; do
platform=$(get_platform "$key")
name=$(get_platform_name "$platform")
if [[ "$platform" == "$CURRENT_PLATFORM" ]]; then
echo " $key) $name [当前平台]"
else
echo " $key) $name"
fi
done
echo " 9) 多平台构建 (同时构建多个平台)"
echo " 0) 自动检测当前平台"
echo ""
read -p "请输入选择 (0-9) [默认: 0]: " platform_choice
platform_choice=${platform_choice:-0}
if [[ "$platform_choice" == "0" ]]; then
TARGET_PLATFORM="$CURRENT_PLATFORM"
TARGET_PLATFORM_NAME="$(get_platform_name "$CURRENT_PLATFORM") [自动检测]"
MULTI_PLATFORM=false
elif [[ "$platform_choice" == "9" ]]; then
TARGET_PLATFORM="linux/amd64,linux/arm64"
TARGET_PLATFORM_NAME="常用平台 (AMD64 + ARM64)"
MULTI_PLATFORM=true
else
TARGET_PLATFORM=$(get_platform "$platform_choice")
if [[ -z "$TARGET_PLATFORM" ]]; then
echo "[错误] 无效的平台选择"
exit 1
fi
TARGET_PLATFORM_NAME=$(get_platform_name "$TARGET_PLATFORM")
MULTI_PLATFORM=false
fi
echo "[信息] 目标平台: $TARGET_PLATFORM_NAME"
echo "[信息] 多平台构建: $MULTI_PLATFORM"
echo ""
# Confirm the build
echo "Build configuration confirmation:"
echo " Architecture: $(get_service_arch_name "$SELECTED_SERVICE_ARCH")"
echo " Ports: $SELECTED_PORTS"
echo " Platform: $TARGET_PLATFORM_NAME"
echo ""
read -p "Confirm and start the test? (y/N): " confirm
if [[ ! "$confirm" =~ ^[Yy]$ ]]; then
echo "[INFO] Test cancelled"
exit 0
fi
# Update the export directory name to include platform information
if [[ "$MULTI_PLATFORM" == "true" ]]; then
EXPORT_DIR="dms-compliance-${SELECTED_SERVICE_ARCH}-multiplatform-$(date +%Y%m%d-%H%M%S)"
else
platform_suffix=$(echo "$TARGET_PLATFORM" | sed 's/linux\///g' | sed 's/\//-/g')
EXPORT_DIR="dms-compliance-${SELECTED_SERVICE_ARCH}-${platform_suffix}-$(date +%Y%m%d-%H%M%S)"
fi
ARCHIVE_NAME="$EXPORT_DIR.tar.gz"
echo "[信息] 最终输出目录: $EXPORT_DIR"
# 创建最终导出目录
rm -rf "$EXPORT_DIR"
mkdir -p "$EXPORT_DIR"
echo ""
echo "[步骤 1/3] 创建测试文件..."
# 创建临时构建目录
TEMP_BUILD_DIR=$(mktemp -d)
trap "rm -rf $TEMP_BUILD_DIR" EXIT
# Create test files
mkdir -p "$TEMP_BUILD_DIR"/{ddms_compliance_suite,custom_stages,templates,static}
echo "# Test requirements" > "$TEMP_BUILD_DIR/requirements.txt"
echo "# Test API server" > "$TEMP_BUILD_DIR/api_server.py"
echo "# Test history viewer" > "$TEMP_BUILD_DIR/history_viewer.py"
echo "[步骤 2/3] 创建配置文件..."
# 创建测试Dockerfile
if [[ "$SELECTED_SERVICE_ARCH" == "dual" ]]; then
cat > "$TEMP_BUILD_DIR/Dockerfile" << 'EOF'
FROM python:3.11-alpine
WORKDIR /app
COPY . .
EXPOSE 5050 5051
CMD ["echo", "Dual service architecture"]
EOF
cat > "$TEMP_BUILD_DIR/supervisord.conf" << 'EOF'
[supervisord]
nodaemon=true
[program:api_server]
command=python api_server.py
[program:history_viewer]
command=python history_viewer.py
EOF
else
cat > "$TEMP_BUILD_DIR/Dockerfile" << 'EOF'
FROM python:3.11-alpine
WORKDIR /app
COPY . .
EXPOSE 5050
CMD ["echo", "Single service architecture"]
EOF
fi
echo "[步骤 3/3] 复制文件到最终目录..."
# 复制构建文件到最终目录
cp -r "$TEMP_BUILD_DIR"/* "$EXPORT_DIR/"
# 创建README
cat > "$EXPORT_DIR/README.md" << EOF
# DMS Compliance Test Tool - Test Deployment Package
## Configuration Information
- **Architecture**: $(get_service_arch_name "$SELECTED_SERVICE_ARCH")
- **Ports**: $SELECTED_PORTS
- **Target platform**: $TARGET_PLATFORM_NAME
- **Build time**: $(date '+%Y-%m-%d %H:%M:%S')
## Test Passed
This is a test deployment package that verifies the basic functionality of the script.
EOF
# Create the archive
echo "[INFO] Creating archive..."
tar -czf "$ARCHIVE_NAME" "$EXPORT_DIR"
# Show the results
echo ""
echo "=== Test complete ==="
echo "[SUCCESS] Test deployment package created: $ARCHIVE_NAME"
echo "[INFO] Architecture: $(get_service_arch_name "$SELECTED_SERVICE_ARCH")"
echo "[INFO] Ports: $SELECTED_PORTS"
echo "[INFO] Platform: $TARGET_PLATFORM_NAME"
echo "[INFO] File size: $(du -h "$ARCHIVE_NAME" | cut -f1)"
echo ""
echo "Test passed! The script works as expected."