package test_data

import (
	"bytes"
	"encoding/csv"
	"encoding/json"
	"errors"
	"io/ioutil"
	"log"
	"os"
	"path"
	"path/filepath"
	"strings"

	"gitee.com/golang-module/carbon/v2"
	"github.com/extrame/xls"
	"github.com/xuri/excelize/v2"
	"golang.org/x/text/encoding/simplifiedchinese"
	"golang.org/x/text/transform"
	"gorm.io/gorm"

	"testData/global"
	"testData/model"
	"testData/utils"
)

// MT737 parses an MT737 tester summary file together with its matching datalog,
// appends the datalog rows to a per-lot CSV under /testData/online/<factory>,
// and records the file and report metadata in PostgreSQL.
func MT737(summary, dataLog *model.FileText) {
	log.Println(summary.Name, dataLog.Name)

	// Read the summary file into rows, depending on its extension.
	summaryRows := make([][]string, 0)
	if strings.ToLower(path.Ext(summary.Name)) == ".csv" {
		csvBytes, err := ioutil.ReadFile(summary.Path)
		if err != nil {
			log.Println("open err:", err)
			return
		}
		// Decode GBK so Chinese text in the CSV is not garbled.
		reader := csv.NewReader(transform.NewReader(bytes.NewReader(csvBytes), simplifiedchinese.GBK.NewDecoder()))
		//reader := csv.NewReader(file)
		reader.FieldsPerRecord = -1
		csvRows, err := reader.ReadAll()
		if err != nil {
			log.Println("open err:", err)
			return
		}
		summaryRows = csvRows
	} else if strings.ToLower(path.Ext(summary.Name)) == ".xls" {
		if xlFile, err := xls.Open(summary.Path, "utf-8"); err == nil {
			// first sheet
			sheet := xlFile.GetSheet(0)
			if sheet.MaxRow != 0 {
				temp := make([][]string, 0)
				for i := 0; i < int(sheet.MaxRow); i++ {
					row := sheet.Row(i)
					if row == nil {
						continue
					}
					data := make([]string, 0)
					if row.LastCol() > 0 {
						for j := 0; j < row.LastCol(); j++ {
							col := row.Col(j)
							data = append(data, col)
						}
						temp = append(temp, data)
					}
				}
				summaryRows = append(summaryRows, temp...)
			} else {
				log.Println("empty xls sheet:", summary.Path)
				return
			}
		}
	} else if strings.ToLower(path.Ext(summary.Name)) == ".xlsx" {
		xlsxFile, err := excelize.OpenFile(summary.Path)
		if err != nil {
			log.Println("open err:", err)
			return
		}
		sheetName := xlsxFile.GetSheetName(0)
		summaryRows, err = xlsxFile.GetRows(sheetName)
		if err != nil {
			log.Println("failed to read xlsx data:", err)
			return
		}
	}

	// Extract the header details (program, tester, start/end time) and the
	// SBin-HBin table from the first column of the summary rows.
	details := make(map[string]string)
	sBinMap := make(map[string]string)
	sbinStartIndex, sbinEndIndex := -1, -1
	for i := 1; i < len(summaryRows); i++ {
		if summaryRows[i][0] == "" {
			continue
		}
		// Normalize full-width colons and "colon space" so the field markers below match.
		summaryRows[i][0] = strings.ReplaceAll(summaryRows[i][0], "：", ":")
		summaryRows[i][0] = strings.ReplaceAll(summaryRows[i][0], ": ", ":")
		//splitIndex := strings.Index(summaryRows[i][0], ":")
		//if splitIndex != -1 {
		//	details[summaryRows[i][0][:splitIndex]] = summaryRows[i][0][splitIndex+1:]
		//}
		if strings.Contains(summaryRows[i][0], "Program:") {
			startIndex := strings.Index(summaryRows[i][0], "Program:") + 8
			for endIndex := startIndex; endIndex < len(summaryRows[i][0]); endIndex++ {
				if string(summaryRows[i][0][endIndex]) == " " || endIndex == len(summaryRows[i][0])-1 {
					details["Program"] = summaryRows[i][0][startIndex : endIndex+1]
					break
				}
			}
		}
		if strings.Contains(summaryRows[i][0], "Tester_NO:") {
			startIndex := strings.Index(summaryRows[i][0], "Tester_NO:") + 10
			for endIndex := startIndex; endIndex < len(summaryRows[i][0]); endIndex++ {
				if string(summaryRows[i][0][endIndex]) == " " || endIndex == len(summaryRows[i][0])-1 {
					details["Tester_NO"] = summaryRows[i][0][startIndex : endIndex+1]
					break
				}
			}
		}
		// The timestamps contain a single space between date and time, so a double
		// space (or the end of the field) marks the end of the Start/End values.
		if strings.Contains(summaryRows[i][0], "Start:") {
			startIndex := strings.Index(summaryRows[i][0], "Start:") + 6
			for endIndex := startIndex; endIndex < len(summaryRows[i][0]); endIndex++ {
				if endIndex == len(summaryRows[i][0])-1 || summaryRows[i][0][endIndex:endIndex+2] == "  " {
					details["Start"] = carbon.Parse(summaryRows[i][0][startIndex:endIndex]).Format("Y-m-d H:i:s")
					break
				}
			}
		}
		if strings.Contains(summaryRows[i][0], "End:") {
			startIndex := strings.Index(summaryRows[i][0], "End:") + 4
			for endIndex := startIndex; endIndex < len(summaryRows[i][0]); endIndex++ {
				if endIndex == len(summaryRows[i][0])-1 || summaryRows[i][0][endIndex:endIndex+2] == "  " {
					details["End"] = carbon.Parse(summaryRows[i][0][startIndex : endIndex+1]).Format("Y-m-d H:i:s")
					break
				}
			}
		}
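		// Rows between the "SBin-HBin" marker and the dashed separator line form a
		// small table of "<sbin> - <hbin>" pairs; collect them into sBinMap.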
		if sbinEndIndex < 0 {
			if strings.Contains(summaryRows[i][0], "SBin-HBin") {
				if sbinStartIndex < 0 {
					sbinStartIndex = i + 2
					continue
				}
			}
			if sbinStartIndex > 0 && i >= sbinStartIndex {
				if strings.Contains(summaryRows[i][0], "---------------") {
					sbinEndIndex = i
					continue
				}
				index := strings.Index(summaryRows[i][0], "-")
				endIndex := index + 2
				for {
					if string(summaryRows[i][0][endIndex]) == " " {
						break
					}
					endIndex++
				}
				sBinMap[strings.ReplaceAll(summaryRows[i][0][:index], " ", "")] = strings.ReplaceAll(summaryRows[i][0][index+2:endIndex], " ", "")
			}
		}
	}
	//if details["Test_Code"] == "" {
	//	details["Test_Code"] = step
	//}
	sbinHbin, _ := json.Marshal(&sBinMap)

	// Build the output file name: CP1/CP2 lots are keyed by the wafer ID taken
	// from the summary file name, everything else by sub-batch.
	fileName := summary.ProductName + "_" + summary.Lot + "_" + details["Test_Code"] + ".csv"
	var waferID string
	waferID = summary.ChipNum
	//if summary.Factory == "yaxinwei" {
	if summary.Procedure == "CP1" || summary.Procedure == "CP2" {
		length := len(summary.Name)
		waferID = summary.Name[length-10 : length-8]
		fileName = summary.ProductName + "_" + summary.Lot + "_" + waferID + "_" + summary.Procedure + ".csv"
	} else {
		if strings.Index(summary.SubBatch, "-") != strings.LastIndex(summary.SubBatch, "-") {
			summary.SubBatch = summary.SubBatch[:strings.LastIndex(summary.SubBatch, "-")]
		}
		fileName = summary.ProductName + "_" + summary.Lot + "_" + summary.SubBatch + "_" + summary.Procedure + ".csv"
	}
	//}

	// Create the output CSV if it does not exist yet and register it once in the
	// handled-files table.
	dirPath := filepath.Join("/testData/online/" + summary.Factory)
	utils.MakeDir(dirPath)
	filePath := filepath.Join(dirPath, fileName)
	var fileHandled *model.FileHandled
	if _, err := os.Stat(filePath); err != nil {
		if os.IsNotExist(err) {
			_, err = os.Create(filePath)
			if err != nil {
				log.Println("failed to create file:", err)
				return
			}
			if summary.ChipNum != "" {
				waferID = summary.ChipNum
			}
			if errors.Is(global.PostGreSQL.Where("name = ?", fileName).First(&fileHandled).Error, gorm.ErrRecordNotFound) {
				global.PostGreSQL.Create(&model.FileHandled{
					Name:             fileName,
					Path:             filePath,
					Size:             "",
					Product:          summary.ProductName,
					PBI:              summary.PBI,
					Factory:          summary.Factory,
					Step:             summary.Procedure,
					Lot:              summary.Lot,
					SubBatch:         summary.SubBatch,
					TestMachineModel: "MT737",
					TestMachine:      details["Tester_NO"],
					TestProgram:      details["Program"],
					BeginningTime:    details["Start"],
					EndingTime:       details["End"],
					SbinHbin:         string(sbinHbin),
					WaferID:          waferID,
				})
			}
		}
	}

	// Read the datalog into rows, with the same csv/xls/xlsx handling as the summary.
	dataLogRows := make([][]string, 0)
	if strings.ToLower(path.Ext(dataLog.Name)) == ".csv" {
		csvBytes, err := ioutil.ReadFile(dataLog.Path)
		if err != nil {
			log.Println("open err:", err)
			return
		}
		// Decode GBK so Chinese text in the CSV is not garbled.
		reader := csv.NewReader(transform.NewReader(bytes.NewReader(csvBytes), simplifiedchinese.GBK.NewDecoder()))
		//reader := csv.NewReader(file)
		reader.FieldsPerRecord = -1
		csvRows, err := reader.ReadAll()
		if err != nil {
			log.Println("open err:", err)
			return
		}
		dataLogRows = csvRows
	} else if strings.ToLower(path.Ext(dataLog.Name)) == ".xls" {
		if xlFile, err := xls.Open(dataLog.Path, "utf-8"); err == nil {
			// first sheet
			sheet := xlFile.GetSheet(0)
			if sheet.MaxRow != 0 {
				temp := make([][]string, 0)
				for i := 0; i < int(sheet.MaxRow); i++ {
					row := sheet.Row(i)
					if row == nil {
						continue
					}
					data := make([]string, 0)
					if row.LastCol() > 0 {
						for j := 0; j < row.LastCol(); j++ {
							col := row.Col(j)
							data = append(data, col)
						}
						temp = append(temp, data)
					}
				}
				dataLogRows = append(dataLogRows, temp...)
			} else {
				log.Println("empty xls sheet:", dataLog.Path)
				return
			}
		}
	} else if strings.ToLower(path.Ext(dataLog.Name)) == ".xlsx" {
		xlsxFile, err := excelize.OpenFile(dataLog.Path)
		if err != nil {
			log.Println("open err:", err)
			return
		}
		sheetName := xlsxFile.GetSheetName(0)
		dataLogRows, err = xlsxFile.GetRows(sheetName)
		if err != nil {
			log.Println("failed to read xlsx data:", err)
			return
		}
	}
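	// Walk the datalog once to find the header rows (TestItem, Unit, LimitL,
	// LimitH, HiFBin) and the "X-Y" marker row that precedes the measurement data.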
	newExcel := make([][]string, 0)
	var title []string
	titleInfoMap := make(map[string]model.DataInfo)
	paramsMap := make(map[string]string)
	titleIndex, unitIndex, limitLIndex, limitUIndex, binIndex, dataIndex := -1, -1, -1, -1, -1, -1
	for i := 1; i < len(dataLogRows); i++ {
		if titleIndex < 0 {
			if strings.Contains(dataLogRows[i][2], "TestItem") {
				titleIndex = i
				title = append(title, []string{"SITE_NUM", "PART_ID", "SOFT_BIN"}...)
				title = append(title, dataLogRows[i][3:]...)
				continue
			}
		} else {
			if unitIndex < 0 {
				if strings.Contains(dataLogRows[i][2], "Unit") {
					dataLogRows[i] = append(dataLogRows[i], utils.FillData(len(dataLogRows[titleIndex])-len(dataLogRows[i]))...)
					unitIndex = i
					continue
				}
			}
			if limitLIndex < 0 {
				if strings.Contains(dataLogRows[i][2], "LimitL") {
					dataLogRows[i] = append(dataLogRows[i], utils.FillData(len(dataLogRows[titleIndex])-len(dataLogRows[i]))...)
					limitLIndex = i
					continue
				}
			}
			if limitUIndex < 0 {
				if strings.Contains(dataLogRows[i][2], "LimitH") {
					dataLogRows[i] = append(dataLogRows[i], utils.FillData(len(dataLogRows[titleIndex])-len(dataLogRows[i]))...)
					limitUIndex = i
					continue
				}
			}
			if binIndex < 0 {
				if strings.Contains(dataLogRows[i][2], "HiFBin") {
					dataLogRows[i] = append(dataLogRows[i], utils.FillData(len(dataLogRows[titleIndex])-len(dataLogRows[i]))...)
					binIndex = i
					continue
				}
			}
			if unitIndex > 0 {
				//dataIndex < 0 &&
				if dataLogRows[i][1] == "X-Y" {
					dataIndex = i + 1
					break
				}
			}
		}
	}
	if titleIndex != -1 {
		// Per-test metadata: unit and limits, keyed by test item name.
		for k, v := range dataLogRows[titleIndex] {
			titleInfoMap[v] = model.DataInfo{
				Unit:   dataLogRows[unitIndex][k],
				LimitL: dataLogRows[limitLIndex][k],
				LimitU: dataLogRows[limitUIndex][k],
			}
		}
		// Map each test item to its fail bin.
		for i := 3; i < len(title)-1; i++ {
			paramsMap[title[i]] = dataLogRows[binIndex][i]
		}
		// Collect the measurement rows, padding short rows to the header width.
		for i := dataIndex; i < len(dataLogRows); i++ {
			if len(dataLogRows[i]) < len(dataLogRows[titleIndex]) {
				dataLogRows[i] = append(dataLogRows[i], utils.FillData(len(dataLogRows[titleIndex])-len(dataLogRows[i]))...)
			}
			dataLogRows[i][0] = strings.ReplaceAll(dataLogRows[i][0], "Site", "")
			newExcel = append(newExcel, dataLogRows[i])
		}
	} else {
		log.Println("unexpected file format, path:", dataLog.Path)
		return
	}
	newFile, err := os.Stat(filePath)
	if err != nil {
		log.Println("failed to stat output file:", dataLog.Path)
		return
	}
	// Only write the header row once, while the output CSV is still empty.
	if newFile.Size() == 0 {
		newExcel = append([][]string{title}, newExcel...)
	}
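	// Append the collected rows to the per-lot CSV, then update the file record
	// and (if needed) create the report row in PostgreSQL.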
	paramsSbin, _ := json.Marshal(&paramsMap)
	titleInfo, _ := json.Marshal(&titleInfoMap)
	//strings.ReplaceAll(fileText.Lot, ".", "-")
	utils.MakeDir(dirPath)
	newCsv, err := os.OpenFile(filePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0644)
	if err != nil {
		log.Println("failed to open output CSV:", dataLog.Path)
		return
	}
	defer newCsv.Close()
	writer := csv.NewWriter(newCsv)
	defer writer.Flush()
	err = writer.WriteAll(newExcel)
	if err != nil {
		log.Println("failed to write output CSV:", dataLog.Path)
		return
	}
	newFile, err = os.Stat(filePath)
	if err != nil {
		log.Println("failed to stat output CSV:", dataLog.Path)
		return
	}
	global.PostGreSQL.Model(&fileHandled).Where("name = ?", fileName).Updates(map[string]interface{}{
		"params_sbin": string(paramsSbin),
		"title_info":  string(titleInfo),
		"size":        utils.FormatFileSize(float64(newFile.Size())),
	})

	var report *model.Report
	var step string
	if dataLog.Procedure == "FT" || dataLog.Procedure == "RT" {
		step = "FT"
	} else if strings.Contains(dataLog.Procedure, "CP") {
		step = "CP"
	}
	if errors.Is(global.PostGreSQL.Where("product = ? AND pbi = ? AND factory = ? AND lot = ? AND step = ? AND sub_batch = ? AND wafer_id = ?",
		dataLog.ProductName, dataLog.PBI, dataLog.Factory, dataLog.Lot, step, dataLog.SubBatch, dataLog.ChipNum).First(&report).Error, gorm.ErrRecordNotFound) {
		orderDate := carbon.Parse("20" + dataLog.PBI[5:11]).Format("Y-m-d")
		report = &model.Report{
			Product:     dataLog.ProductName,
			PBI:         dataLog.PBI,
			Factory:     dataLog.Factory,
			Step:        step,
			Lot:         dataLog.Lot,
			SubBatch:    dataLog.SubBatch,
			TestMachine: details["Tester_NO"], // the summary header stores the tester under "Tester_NO"
			TestProgram: details["Program"],
			WaferID:     dataLog.ChipNum,
			OrderDate:   orderDate,
		}
		global.PostGreSQL.Create(&report)
	}
	if strings.Contains(dataLog.Procedure, "CP") {
		SaveCP(report)
	} else {
		SaveFT(report)
	}
}
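// Usage sketch (illustrative only; the names and paths below are hypothetical,
// and the real *model.FileText pairs are resolved elsewhere in this project):
//
//	summary := &model.FileText{Name: "lot_summary.csv", Path: "/testData/raw/lot_summary.csv"}
//	dataLog := &model.FileText{Name: "lot_datalog.csv", Path: "/testData/raw/lot_datalog.csv"}
//	MT737(summary, dataLog)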