// HTTP-level performance tests for the dic-table batch-save API.
// They require a running service at performanceBaseURL and the auth
// helpers getUserAuthToken / createConfigToken defined elsewhere in
// this package.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"testing"
	"time"

	"git.x2erp.com/qdy/go-svc-configure/internal/service/dicmanagement"
)

const (
	performanceBaseURL = "http://localhost:8080"

	// perfHTTPTimeout bounds every request; the previous clients had no
	// timeout and could hang the test run forever on an unresponsive server.
	perfHTTPTimeout = 30 * time.Second
)

// newPerfHTTPClient builds the HTTP client used by the performance tests,
// with an explicit timeout.
func newPerfHTTPClient() *http.Client {
	return &http.Client{Timeout: perfHTTPTimeout}
}

// getPerfAuthToken acquires a config token for the performance tests by
// chaining the package's existing auth helpers. Any failure aborts the test.
func getPerfAuthToken(t *testing.T) string {
	t.Helper()

	userToken, err := getUserAuthToken(t)
	if err != nil {
		t.Fatalf("获取用户认证token失败: %v", err)
	}
	configToken, err := createConfigToken(t, userToken)
	if err != nil {
		t.Fatalf("创建配置token失败: %v", err)
	}
	return configToken
}

// TestBatchPerformance measures batch-save performance with 10 tables /
// 100 fields, then a partial update, then cleans up the created data.
func TestBatchPerformance(t *testing.T) {
	// 1. Batch-create 10 tables with 10 fields each.
	t.Run("BatchCreate10Tables100Fields", func(t *testing.T) {
		testBatchCreateLargeDataset(t, 10, 10)
	})

	// 2. Batch-update a subset of the data.
	t.Run("BatchUpdatePartialData", func(t *testing.T) {
		testBatchUpdatePartial(t, 10, 10)
	})

	// 3. Remove the test data.
	t.Run("CleanupPerformanceTestData", func(t *testing.T) {
		cleanupPerformanceTestData(t, 10)
	})
}

// postBatchSave serializes reqBody, POSTs it to the batch-save endpoint and
// returns the decoded JSON response, the request payload size in bytes, and
// the round-trip duration (request plus full body read). Transport or
// decoding errors abort the test. Extracted because the create and update
// paths previously duplicated this logic verbatim.
func postBatchSave(t *testing.T, client *http.Client, token string, reqBody dicmanagement.BatchSaveDicTablesRequest) (map[string]interface{}, int, time.Duration) {
	t.Helper()

	jsonData, err := json.Marshal(reqBody)
	if err != nil {
		t.Fatalf("JSON序列化失败: %v", err)
	}

	req, err := http.NewRequest("POST", performanceBaseURL+"/api/dic-table/batch-save", bytes.NewBuffer(jsonData))
	if err != nil {
		t.Fatalf("创建请求失败: %v", err)
	}
	req.Header.Set("Authorization", "Bearer "+token)
	req.Header.Set("Content-Type", "application/json")

	// Time the request together with reading the full response body.
	startTime := time.Now()
	resp, err := client.Do(req)
	if err != nil {
		t.Fatalf("请求失败: %v", err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		t.Fatalf("读取响应失败: %v", err)
	}
	executeTime := time.Since(startTime)

	var result map[string]interface{}
	if err := json.Unmarshal(body, &result); err != nil {
		t.Fatalf("JSON解析失败: %v", err)
	}
	return result, len(jsonData), executeTime
}

// testBatchCreateLargeDataset creates tableCount tables with fieldsPerTable
// fields each through the batch-save API, logging generation and execution
// times along with the payload size.
func testBatchCreateLargeDataset(t *testing.T, tableCount, fieldsPerTable int) {
	httpClient := newPerfHTTPClient()
	token := getPerfAuthToken(t)

	// Generate the test payload and record how long generation takes.
	startTime := time.Now()
	reqBody := generateLargeBatchRequest(tableCount, fieldsPerTable)
	generateTime := time.Since(startTime)
	t.Logf("生成 %d 个表,%d 个字段数据耗时: %v", tableCount, tableCount*fieldsPerTable, generateTime)

	result, payloadSize, executeTime := postBatchSave(t, httpClient, token, reqBody)

	if success, ok := result["success"].(bool); !ok || !success {
		t.Errorf("批量创建表失败: %v", result)
	} else {
		t.Logf("批量创建 %d 个表,%d 个字段成功,耗时: %v", tableCount, tableCount*fieldsPerTable, executeTime)
		t.Logf("总数据量: %d bytes", payloadSize)
	}
}

// testBatchUpdatePartial re-submits a partial dataset (first tables updated,
// remaining fields re-sent) through the batch-save API and logs timings.
func testBatchUpdatePartial(t *testing.T, tableCount, fieldsPerTable int) {
	httpClient := newPerfHTTPClient()
	token := getPerfAuthToken(t)

	// Build the update payload: the first 5 tables, with the first 5 fields
	// of each renamed and the rest re-submitted unchanged.
	startTime := time.Now()
	reqBody := generateUpdateBatchRequest(tableCount, fieldsPerTable)
	generateTime := time.Since(startTime)
	t.Logf("生成更新数据耗时: %v", generateTime)

	result, payloadSize, executeTime := postBatchSave(t, httpClient, token, reqBody)

	if success, ok := result["success"].(bool); !ok || !success {
		t.Errorf("批量更新表失败: %v", result)
	} else {
		t.Logf("批量更新成功,耗时: %v", executeTime)
		t.Logf("更新数据量: %d bytes", payloadSize)
	}
}

// generateLargeBatchRequest builds a batch-save request containing
// tableCount tables ("perf_table_NNN") with fieldsPerTable fields each.
func generateLargeBatchRequest(tableCount, fieldsPerTable int) dicmanagement.BatchSaveDicTablesRequest {
	tables := make([]dicmanagement.DicTableRequest, 0, tableCount)
	fields := make([]dicmanagement.DicTableFieldRequest, 0, tableCount*fieldsPerTable)

	for i := 1; i <= tableCount; i++ {
		tableID := fmt.Sprintf("perf_table_%03d", i)
		tables = append(tables, dicmanagement.DicTableRequest{
			TableID:     tableID,
			TableType:   "实体表",
			Name:        fmt.Sprintf("性能测试表%03d", i),
			Description: fmt.Sprintf("性能测试表%03d的描述", i),
		})

		for j := 1; j <= fieldsPerTable; j++ {
			fields = append(fields, dicmanagement.DicTableFieldRequest{
				FieldID:     fmt.Sprintf("%s.field_%03d", tableID, j),
				TableID:     tableID,
				FiledType:   "实际字段",
				DataType:    getDataType(j),
				FieldName:   fmt.Sprintf("field_%03d", j),
				FieldNameCN: fmt.Sprintf("字段%03d", j),
				Description: fmt.Sprintf("表%s的第%03d个字段", tableID, j),
			})
		}
	}

	return dicmanagement.BatchSaveDicTablesRequest{
		Tables: tables,
		Fields: fields,
	}
}

// generateUpdateBatchRequest builds an update payload covering only the
// first 5 tables (or fewer when tableCount < 5). For each of those tables
// the first 5 fields get updated names/descriptions; the remaining fields
// are re-submitted so the server sees the full field set.
func generateUpdateBatchRequest(tableCount, fieldsPerTable int) dicmanagement.BatchSaveDicTablesRequest {
	tables := make([]dicmanagement.DicTableRequest, 0, tableCount/2)
	fields := make([]dicmanagement.DicTableFieldRequest, 0, (tableCount/2)*fieldsPerTable)

	for i := 1; i <= 5 && i <= tableCount; i++ {
		tableID := fmt.Sprintf("perf_table_%03d", i)
		tables = append(tables, dicmanagement.DicTableRequest{
			TableID:     tableID,
			TableType:   "实体表",
			Name:        fmt.Sprintf("更新后的性能测试表%03d", i),
			Description: fmt.Sprintf("更新后的性能测试表%03d的描述", i),
		})

		for j := 1; j <= fieldsPerTable; j++ {
			fieldNameCN := fmt.Sprintf("字段%03d", j)
			if j <= 5 {
				// Only the first 5 fields receive an updated Chinese name.
				fieldNameCN = fmt.Sprintf("更新后的字段%03d", j)
			}
			fields = append(fields, dicmanagement.DicTableFieldRequest{
				FieldID:     fmt.Sprintf("%s.field_%03d", tableID, j),
				TableID:     tableID,
				FiledType:   "实际字段",
				DataType:    getDataType(j),
				FieldName:   fmt.Sprintf("field_%03d", j),
				FieldNameCN: fieldNameCN,
				Description: fmt.Sprintf("表%s的第%03d个字段(已更新)", tableID, j),
			})
		}
	}

	return dicmanagement.BatchSaveDicTablesRequest{
		Tables: tables,
		Fields: fields,
	}
}

// getDataType maps a 1-based field number onto a fixed rotation of data
// types. Note the modulo is taken on the 1-based number, so field 5, 10, …
// wrap to the first entry — kept as-is to preserve existing dataset shape.
func getDataType(fieldNum int) string {
	dataTypes := []string{"字符型", "数值型", "日期型", "布尔型", "文本型"}
	return dataTypes[fieldNum%len(dataTypes)]
}

// cleanupPerformanceTestData deletes every "perf_table_NNN" created by the
// performance tests. The auth token is fetched once here instead of per
// table (previously each delete re-ran the full two-step auth flow).
func cleanupPerformanceTestData(t *testing.T, tableCount int) {
	httpClient := newPerfHTTPClient()
	token := getPerfAuthToken(t)
	for i := 1; i <= tableCount; i++ {
		deletePerfTable(t, httpClient, token, fmt.Sprintf("perf_table_%03d", i))
	}
}

// deletePerfTable issues a delete request for one table and verifies the
// JSON response reports success. Failures are reported via t.Errorf so the
// loop in cleanupPerformanceTestData continues with the remaining tables.
func deletePerfTable(t *testing.T, client *http.Client, token, tableID string) {
	t.Helper()

	url := performanceBaseURL + "/api/dic-table/delete/" + tableID
	req, err := http.NewRequest("POST", url, nil)
	if err != nil {
		t.Errorf("创建删除请求失败: %v", err)
		return
	}
	req.Header.Set("Authorization", "Bearer "+token)
	req.Header.Set("Content-Type", "application/json")

	resp, err := client.Do(req)
	if err != nil {
		t.Errorf("删除请求失败: %v", err)
		return
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		t.Errorf("读取响应失败: %v", err)
		return
	}

	var result map[string]interface{}
	if err := json.Unmarshal(body, &result); err != nil {
		t.Errorf("JSON解析失败: %v", err)
		return
	}

	if success, ok := result["success"].(bool); !ok || !success {
		t.Errorf("删除表 %s 失败: %v", tableID, result)
	} else {
		t.Logf("删除表 %s 成功", tableID)
	}
}

// TestBatchPerformanceLarge runs the same create/update/cleanup cycle at a
// larger scale (20 tables, 300 fields). Skipped under `go test -short`.
func TestBatchPerformanceLarge(t *testing.T) {
	if testing.Short() {
		t.Skip("跳过大规模性能测试")
	}

	tableCount := 20
	fieldsPerTable := 15
	totalFields := tableCount * fieldsPerTable

	t.Run(fmt.Sprintf("BatchCreate%vTables%vFields", tableCount, totalFields), func(t *testing.T) {
		testBatchCreateLargeDataset(t, tableCount, fieldsPerTable)
	})

	t.Run(fmt.Sprintf("BatchUpdate%vTables", tableCount/2), func(t *testing.T) {
		testBatchUpdatePartial(t, tableCount, fieldsPerTable)
	})

	t.Run(fmt.Sprintf("Cleanup%vTables", tableCount), func(t *testing.T) {
		cleanupPerformanceTestData(t, tableCount)
	})
}