package test_data

import (
	"bytes"
	"encoding/csv"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"log"
	"os"
	"path"
	"path/filepath"
	"strings"

	"gitee.com/golang-module/carbon/v2"
	"github.com/extrame/xls"
	"github.com/xuri/excelize/v2"
	"golang.org/x/text/encoding/simplifiedchinese"
	"golang.org/x/text/transform"
	"gorm.io/gorm"

	"testData/global"
	"testData/model"
	"testData/utils"
)

// HandleSTS8200Excel parses a raw STS8200 tester log (.csv, .xls or .xlsx),
// extracts the header details, the SBin/HBin mapping and the per-parameter
// limits, appends the measurement rows to a consolidated CSV, and records the
// file and its report in PostgreSQL.
func HandleSTS8200Excel(fileText *model.FileText) error {
	rows := make([][]string, 0)
	if strings.ToLower(path.Ext(fileText.Name)) == ".csv" {
		csvBytes, err := ioutil.ReadFile(fileText.Path)
		if err != nil {
			log.Println("open err:", err)
			return fmt.Errorf("open err:%s", err)
		}
		// Decode from GBK so Chinese text in the CSV is not garbled.
		reader := csv.NewReader(transform.NewReader(bytes.NewReader(csvBytes), simplifiedchinese.GBK.NewDecoder()))
		//reader := csv.NewReader(file)
		reader.FieldsPerRecord = -1
		csvRows, err := reader.ReadAll()
		if err != nil {
			log.Println("open err:", err)
			return fmt.Errorf("open err:%s", err)
		}
		rows = csvRows
	} else if strings.ToLower(path.Ext(fileText.Name)) == ".xls" {
		xlFile, err := xls.Open(fileText.Path, "utf-8")
		if err != nil {
			log.Println("open err:", err)
			return fmt.Errorf("open err:%s", err)
		}
		// First sheet only.
		sheet := xlFile.GetSheet(0)
		if sheet == nil || sheet.MaxRow == 0 {
			log.Println("empty xls sheet, file path:", fileText.Path)
			return fmt.Errorf("empty xls sheet, file path:%s", fileText.Path)
		}
		temp := make([][]string, 0)
		for i := 0; i < int(sheet.MaxRow); i++ {
			row := sheet.Row(i)
			if row == nil {
				continue
			}
			data := make([]string, 0)
			if row.LastCol() > 0 {
				for j := 0; j < row.LastCol(); j++ {
					data = append(data, row.Col(j))
				}
				temp = append(temp, data)
			}
		}
		rows = append(rows, temp...)
	} else if strings.ToLower(path.Ext(fileText.Name)) == ".xlsx" {
		xlsxFile, err := excelize.OpenFile(fileText.Path)
		if err != nil {
			log.Println("open err:", err)
			return fmt.Errorf("open err:%s", err)
		}
		sheetName := xlsxFile.GetSheetName(0)
		rows, err = xlsxFile.GetRows(sheetName)
		if err != nil {
			log.Println("failed to read xlsx data:", err)
			return fmt.Errorf("failed to read xlsx data:%s", err)
		}
	}
	details := make(map[string]string)
	newExcel := make([][]string, 0)
	var title []string
	titleInfoMap := make(map[string]model.DataInfo)
	//sBinMap := make(map[string]model.BinInfo)
	sBinMap := make(map[string]string)
	paramsMap := make(map[string]string)
	sbinStartIndex, sbinEndIndex := -1, -1
	titleIndex, unitIndex, limitLIndex, limitUIndex, dataIndex := -1, -1, -1, -1, -1
	for i := 1; i < len(rows); i++ {
		if len(rows[i]) == 0 || rows[i][0] == "" {
			continue
		}
		if i == len(rows)-1 && unitIndex == -1 {
			log.Println("unexpected file format, file path:", fileText.Path)
			return fmt.Errorf("unexpected file format, file path:%s", fileText.Path)
		}
		if sbinEndIndex < 0 {
			// SBin definition rows: a first cell such as "SBin[3] IDDQ_FAIL 5"
			// (format inferred from the parsing below) yields
			// sBinMap["3"] = "5" (SBin -> HBin) and paramsMap["IDDQ"] = "3"
			// (name up to the first space or underscore -> SBin).
			if strings.Contains(rows[i][0], "SBin") {
				if sbinStartIndex < 0 {
					sbinStartIndex = i
				}
				s := rows[i][0]
				if strings.Index(s, "]") != -1 {
					sbinNumStartIndex := strings.Index(s, "[")
					sbinNumEndIndex := strings.Index(s, "]")
					var startIndex, endIndex int
					for index := sbinNumEndIndex + 1; index < len(s); index++ {
						if startIndex == 0 {
							if s[index] != ' ' {
								startIndex = index
							}
						} else {
							if s[index] == ' ' || s[index] == '_' {
								endIndex = index
								break
							}
						}
					}
					//sBinMap[s[sbinNumStartIndex+1:sbinNumEndIndex]] = model.BinInfo{
					//	Name: s[startIndex:endIndex],
					//	HBin: s[strings.LastIndex(s, " ")+1:],
					//}
					sBinMap[s[sbinNumStartIndex+1:sbinNumEndIndex]] = s[strings.LastIndex(s, " ")+1:]
					paramsMap[s[startIndex:endIndex]] = s[sbinNumStartIndex+1 : sbinNumEndIndex]
				}
			} else {
				if sbinStartIndex > 0 {
					sbinEndIndex = i
					continue
				}
			}
		}
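		// The rest of this scan collects "Key: Value" header cells into details
		// (the full-width "：" is normalized to ":" first), takes the row
		// containing SITE_NUM as the column title row, pads the Unit / LimitL /
		// LimitU rows that follow it to the title row's width, and treats the
		// first subsequent row whose first and third cells differ as the start
		// of the measurement data.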
		//if len(sBinMap) == 0 {
		rows[i][0] = strings.ReplaceAll(rows[i][0], "：", ":")
		splitIndex := strings.Index(rows[i][0], ":")
		if splitIndex != -1 {
			details[rows[i][0][:splitIndex]] = rows[i][0][splitIndex+1:]
		}
		//}
		if titleIndex < 0 {
			if strings.Contains(rows[i][0], "SITE_NUM") {
				titleIndex = i
				title = rows[i]
				continue
			}
		} else {
			if unitIndex < 0 {
				if strings.Contains(rows[i][0], "Unit") {
					rows[i] = append(rows[i], utils.FillData(len(rows[titleIndex])-len(rows[i]))...)
					unitIndex = i
					continue
				}
			}
			if limitLIndex < 0 {
				if strings.Contains(rows[i][0], "LimitL") {
					rows[i] = append(rows[i], utils.FillData(len(rows[titleIndex])-len(rows[i]))...)
					limitLIndex = i
					continue
				}
			}
			if limitUIndex < 0 {
				if strings.Contains(rows[i][0], "LimitU") {
					rows[i] = append(rows[i], utils.FillData(len(rows[titleIndex])-len(rows[i]))...)
					limitUIndex = i
					continue
				}
			}
			if dataIndex < 0 && limitUIndex > 0 {
				if rows[i][0] != rows[i][2] {
					dataIndex = i
					break
				}
			}
		}
	}
	if titleIndex != -1 {
		// Build the per-column unit/limit lookup from the title, Unit, LimitL
		// and LimitU rows, then pad every data row to the title width.
		for k, v := range rows[titleIndex] {
			titleInfoMap[v] = model.DataInfo{
				Unit:   rows[unitIndex][k],
				LimitL: rows[limitLIndex][k],
				LimitU: rows[limitUIndex][k],
			}
		}
		for i := dataIndex; i < len(rows); i++ {
			if len(rows[i]) < len(rows[titleIndex]) {
				rows[i] = append(rows[i], utils.FillData(len(rows[titleIndex])-len(rows[i]))...)
			}
			newExcel = append(newExcel, rows[i])
		}
	} else {
		log.Println("unexpected file format, file path:", fileText.Path)
		return fmt.Errorf("unexpected file format, file path:%s", fileText.Path)
	}
	//if details["Test_Code"] == "" {
	//	details["Test_Code"] = step
	//} else {
	//	details["Test_Code"] = details["Test_Code"][:2]
	//}
	//strReader := transform.NewReader(bytes.NewReader([]byte(details["Device_Name"])), simplifiedchinese.GBK.NewDecoder())
	//product, _ := ioutil.ReadAll(strReader)
	//if string(product) == "" {
	//	product = []byte(fileText.ProductName)
	//}
	testProgram := details["Program"][strings.LastIndex(details["Program"], "\\")+1:]
	sbinHbin, _ := json.Marshal(&sBinMap)
	titleInfo, _ := json.Marshal(&titleInfoMap)
	paramsSbin, _ := json.Marshal(&paramsMap)
	var fileName string
	if fileText.Procedure == "CP" {
		fileName = fileText.ProductName + "_" + fileText.Lot + "_" + fileText.ChipNum + "_" + fileText.Procedure + ".csv"
	} else {
		fileName = fileText.ProductName + "_" + fileText.Lot + "_" + fileText.SubBatch + "_" + fileText.Procedure + ".csv"
	}
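	// Consolidated output naming: when Procedure is exactly "CP" the file is
	// keyed by wafer (ChipNum), "<product>_<lot>_<chipNum>_CP.csv"; otherwise
	// it is keyed by sub-batch, "<product>_<lot>_<subBatch>_<procedure>.csv".
	// The commented-out block below is the earlier per-factory naming logic.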
	//fileName := string(product) + "_" + fileText.Lot + "_" + details["Test_Code"] + ".csv"
	//var waferID string
	//if fileText.Factory == "saimeike" {
	//	if step == "CP1" || step == "CP2" {
	//		length := len(fileText.Name)
	//		waferID = fileText.Name[length-10 : length-8]
	//		fileName = string(product) + "_" + fileText.Lot + "_" + waferID + "_" + details["Test_Code"] + ".csv"
	//	} else {
	//		if strings.Index(fileText.SubBatch, "-") != strings.LastIndex(fileText.SubBatch, "-") {
	//			fileText.SubBatch = fileText.SubBatch[:strings.LastIndex(fileText.SubBatch, "-")]
	//		}
	//		fileName = string(product) + "_" + fileText.Lot + "_" + fileText.SubBatch + "_" + details["Test_Code"] + ".csv"
	//	}
	//} else if fileText.Factory == "xinde" {
	//	s := strings.Split(fileText.Name, "_")
	//	if len(s) < 5 {
	//		log.Println("invalid file name format, file path:", fileText.Path)
	//		return
	//	}
	//	waferIDStr := s[len(s)-3]
	//	if len(waferIDStr) < 5 {
	//		log.Println("invalid file name format, file path:", fileText.Path)
	//		return
	//	}
	//	splitIndex := strings.Index(waferIDStr, "-")
	//	if splitIndex == -1 {
	//		log.Println("invalid file name format, file path:", fileText.Path)
	//		return
	//	}
	//	waferID = waferIDStr[splitIndex+1 : splitIndex+3]
	//	if _, ok := details["WAFER_ID"]; ok {
	//		details["Test_Code"] = "CP"
	//		if len(waferIDStr[:splitIndex]) < 8 {
	//			fileText.Lot = strings.ToUpper(waferIDStr[:splitIndex])
	//		}
	//		fileText.SubBatch = ""
	//		fileName = string(product) + "_" + fileText.Lot + "_" + waferID + "_" + details["Test_Code"] + ".csv"
	//	}
	//} else if fileText.Factory == "qipai" {
	//	//details["Test_Code"]
	//} else if fileText.Factory == "changdian" {
	//	if details["Device_Name"] == "" {
	//		log.Println("file is missing product model info, file path:", fileText.Path)
	//		return
	//	}
	//	product = []byte(details["Device_Name"])
	//	fileName = details["Device_Name"] + "_" + fileText.Lot + "_" + fileText.SubBatch + "_" + details["Test_Code"] + ".csv"
	//}
	//strings.ReplaceAll(fileText.Lot, ".", "-")
	dirPath := filepath.Join("/testData/online/" + fileText.Factory)
	utils.MakeDir(dirPath)
	filePath := filepath.Join(dirPath, fileName)
	var fileHandled *model.FileHandled
	if _, err := os.Stat(filePath); err != nil {
		if os.IsNotExist(err) {
			// First file for this consolidated CSV: prepend the title row and
			// create the file plus its FileHandled record.
			newExcel = append([][]string{title}, newExcel...)
			_, err = os.Create(filePath)
			if err != nil {
				log.Println("failed to create file:", err)
				return fmt.Errorf("failed to create file:%s", err)
			}
			//if fileText.ChipNum != "" {
			//	waferID = fileText.ChipNum
			//}
			if errors.Is(global.PostGreSQL.Where("name = ?", fileName).First(&fileHandled).Error, gorm.ErrRecordNotFound) {
				//global.PostGreSQL.Create(&model.FileHandled{
				//	Name:             fileName,
				//	Path:             filePath,
				//	Size:             "",
				//	Product:          string(product),
				//	PBI:              fileText.PBI,
				//	Factory:          fileText.Factory,
				//	Step:             details["Test_Code"],
				//	Lot:              fileText.Lot,
				//	SubBatch:         fileText.SubBatch,
				//	TestMachineModel: "STS8200",
				//	TestMachine:      details["Tester ID"],
				//	TestProgram:      testProgram,
				//	BeginningTime:    details["Beginning Time"],
				//	EndingTime:       details["Ending Time"],
				//	SbinHbin:         string(sbinHbin),
				//	ParamsSbin:       string(paramsSbin),
				//	TitleInfo:        string(titleInfo),
				//	WaferID:          waferID,
				//})
				global.PostGreSQL.Create(&model.FileHandled{
					Name:             fileName,
					Path:             filePath,
					Size:             "",
					Product:          fileText.ProductName,
					PBI:              fileText.PBI,
					Factory:          fileText.Factory,
					Step:             fileText.Procedure,
					Lot:              fileText.Lot,
					SubBatch:         fileText.SubBatch,
					TestMachineModel: "STS8200",
					TestMachine:      details["Tester ID"],
					TestProgram:      testProgram,
					BeginningTime:    details["Beginning Time"],
					EndingTime:       details["Ending Time"],
					SbinHbin:         string(sbinHbin),
					ParamsSbin:       string(paramsSbin),
					TitleInfo:        string(titleInfo),
					WaferID:          fileText.ChipNum,
				})
			}
		}
	}
	newCsv, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0644)
	if err != nil {
		log.Println("error opening output CSV:", fileText.Path)
		return fmt.Errorf("error opening output CSV:%s", fileText.Path)
	}
	defer newCsv.Close()
	writer := csv.NewWriter(newCsv)
	defer writer.Flush()
	err = writer.WriteAll(newExcel)
	if err != nil {
		log.Println("error writing output CSV:", fileText.Path)
		return fmt.Errorf("error writing output CSV:%s", fileText.Path)
	}
	newFile, err := os.Stat(filePath)
	if err != nil {
		log.Println("error getting output CSV info:", fileText.Path)
		return fmt.Errorf("error getting output CSV info:%s", fileText.Path)
	}
	global.PostGreSQL.Model(&fileHandled).Where("name = ?", fileName).Updates(map[string]interface{}{
		"product": fileText.ProductName,
		"step":    fileText.Procedure,
		//"product": string(product),
		//"step": details["Test_Code"],
		"test_machine_model": "STS8200",
		"test_machine":       details["Tester ID"],
		"test_program":       testProgram,
		"beginning_time":     details["Beginning Time"],
		"ending_time":        details["Ending Time"],
		"sbin_hbin":          string(sbinHbin),
		"params_sbin":        string(paramsSbin),
		"title_info":         string(titleInfo),
		"size":               utils.FormatFileSize(float64(newFile.Size())),
	})
	var report *model.Report
	var step string
	if fileText.Procedure == "FT" || fileText.Procedure == "RT" {
		step = "FT"
	} else if strings.Contains(fileText.Procedure, "CP") {
		step = "CP"
	}
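	// One Report row is kept per product/PBI/factory/lot/step/sub-batch/wafer
	// combination; the order date is derived from the PBI, which is assumed to
	// carry a YYMMDD date at offset 5 ("20" + PBI[5:11]).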
	if errors.Is(global.PostGreSQL.Where(
		"product = ? AND pbi = ? AND factory = ? AND lot = ? AND step = ? AND sub_batch = ? AND wafer_id = ?",
		fileText.ProductName, fileText.PBI, fileText.Factory, fileText.Lot, step, fileText.SubBatch, fileText.ChipNum,
	).First(&report).Error, gorm.ErrRecordNotFound) {
		orderDate := carbon.Parse("20" + fileText.PBI[5:11]).Format("Y-m-d")
		report = &model.Report{
			Product:     fileText.ProductName,
			PBI:         fileText.PBI,
			Factory:     fileText.Factory,
			Step:        step,
			Lot:         fileText.Lot,
			SubBatch:    fileText.SubBatch,
			TestMachine: details["Tester ID"],
			TestProgram: testProgram,
			WaferID:     fileText.ChipNum,
			OrderDate:   orderDate,
		}
		global.PostGreSQL.Create(&report)
	}
	if strings.Contains(fileText.Procedure, "CP") {
		SaveCP(report)
	} else {
		SaveFT(report)
	}
	return nil
}
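// Hypothetical caller sketch (values are placeholders, not real data): it
// shows which model.FileText fields HandleSTS8200Excel reads. The PBI
// placeholder follows the layout the code assumes, a YYMMDD date starting at
// byte 5, and Procedure selects the CP/FT post-processing path.
func exampleHandleSTS8200Excel() {
	fileText := &model.FileText{
		Name:        "example_sts8200_log.csv", // extension picks the csv/xls/xlsx reader
		Path:        "/testData/raw/example_sts8200_log.csv",
		ProductName: "DEMO1234",
		PBI:         "AB-CD230101-01", // "20" + PBI[5:11] -> order date 2023-01-01
		Factory:     "demo",
		Procedure:   "CP1", // contains "CP" -> step "CP", SaveCP branch
		Lot:         "LOT001",
		SubBatch:    "01",
		ChipNum:     "05", // stored as the wafer ID
	}
	if err := HandleSTS8200Excel(fileText); err != nil {
		log.Println("handle STS8200 file failed:", err)
	}
}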