package importer

import (
	"encoding/json"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"sync"
	"sync/atomic"

	"git.wecise.com/wecise/cgimport/odbc"
	"git.wecise.com/wecise/cgimport/reader"
	"git.wecise.com/wecise/util/filewalker"
	"git.wecise.com/wecise/util/merrs"
	"git.wecise.com/wecise/util/rc"
)

var mcfg = odbc.Config
var logger = odbc.Logger

// Importer walks a data directory and imports every file it finds through an ODBCImporter.
type Importer struct {
	datapath     string
	parallel     int
	importrc     *rc.RoutinesController
	odbcimporter *ODBCImporter
}

// ImportDir imports all files under datapath, reading up to parallel files concurrently.
// It returns the number of files and records imported.
func ImportDir(datapath string, parallel int) (filescount, recordscount int64, err error) {
	importer := &Importer{
		datapath:     datapath,
		parallel:     parallel,
		importrc:     rc.NewRoutinesController("", 1000),
		odbcimporter: NewODBCImporter(),
	}
	return importer.Import()
}

// Import walks the data directory and imports each file in its own goroutine.
func (importer *Importer) Import() (filescount, recordscount int64, err error) {
	var cgirc = rc.NewRoutinesController("", importer.parallel)
	var wg sync.WaitGroup
	// errmutex guards err, which is written by the import goroutines and read by the walker.
	var errmutex sync.Mutex
	seterr := func(e error) {
		errmutex.Lock()
		defer errmutex.Unlock()
		if err == nil {
			err = e
		}
	}
	geterr := func() error {
		errmutex.Lock()
		defer errmutex.Unlock()
		return err
	}
	fw, e := filewalker.NewFileWalker([]string{importer.datapath}, ".*")
	if e != nil {
		err = e
		return
	}
	e = fw.List(func(basedir string, fpath string) bool {
		if geterr() != nil {
			// stop walking once any file has failed
			return false
		}
		filename := filepath.Join(basedir, fpath)
		wg.Add(1)
		ce := cgirc.ConcurCall(1, func() {
			defer wg.Done()
			records, e := importer.ImportFile(filename)
			if e != nil {
				seterr(e)
				return
			}
			atomic.AddInt64(&filescount, 1)
			atomic.AddInt64(&recordscount, int64(records))
		})
		if ce != nil {
			// Assume the callback was not scheduled when ConcurCall returns an error,
			// so release the WaitGroup slot here.
			wg.Done()
			seterr(ce)
			return false
		}
		return true
	})
	wg.Wait()
	if e != nil {
		err = e
	}
	return
}

// ImportFile opens a single data file and imports its records.
func (importer *Importer) ImportFile(fpath string) (blockcount int, err error) {
	f, e := os.Open(fpath)
	if e != nil {
		return blockcount, merrs.NewError(e, merrs.SSMaps{{"filename": fpath}})
	}
	defer f.Close()
	return importer.importReader(fpath, f)
}

// importReader reads the file block by block and imports each block in its own goroutine.
// It returns the number of blocks imported successfully.
func (importer *Importer) importReader(filename string, buf io.Reader) (blockcount int, err error) {
	br, e := reader.NewBlockReader(filename, buf)
	if e != nil {
		return blockcount, merrs.NewError(e, merrs.SSMaps{{"filename": filename}})
	}
	var wg sync.WaitGroup
	// mutex guards imported and asyncerr, which are shared with the import goroutines.
	var mutex sync.Mutex
	var imported int
	var asyncerr error
	// Wait for all pending imports before returning, then publish their results
	// into the named return values.
	defer func() {
		wg.Wait()
		blockcount = imported
		if err == nil {
			err = asyncerr
		}
	}()
	for {
		mutex.Lock()
		stopped := asyncerr != nil
		mutex.Unlock()
		if stopped {
			// a previous block failed to import, stop reading
			return
		}
		block, linecount, e := br.ReadBlock()
		if e != nil {
			err = merrs.NewError(e, merrs.SSMaps{{"filename": filename}, {"line": fmt.Sprint(linecount)}})
			return
		}
		if block == nil {
			// end of file
			return
		}
		wg.Add(1)
		ce := importer.importrc.ConcurCall(1, func() {
			defer wg.Done()
			if e := importer.importRecord(block, filename, linecount); e != nil {
				mutex.Lock()
				if asyncerr == nil {
					asyncerr = merrs.NewError(e, merrs.SSMaps{{"filename": filename}, {"line": fmt.Sprint(linecount)}})
				}
				mutex.Unlock()
				return
			}
			mutex.Lock()
			imported++
			mutex.Unlock()
		})
		if ce != nil {
			// Assume the callback was not scheduled when ConcurCall returns an error,
			// so release the WaitGroup slot here.
			wg.Done()
			err = merrs.NewError(ce, merrs.SSMaps{{"filename": filename}, {"line": fmt.Sprint(linecount)}})
			return
		}
	}
}

// importRecord ensures the record's class structure exists, then inserts the record.
func (importer *Importer) importRecord(record map[string]any, filename string, linecount int) (err error) {
	bs, e := json.MarshalIndent(record, "", " ")
	if e != nil {
		return e
	}
	logger.Debug(fmt.Sprint("import ", filename, "[", linecount, "]:", string(bs)))
	e = importer.odbcimporter.ReviseClassStruct(record)
	if e != nil {
		return e
	}
	e = importer.odbcimporter.InsertData(record)
	if e != nil {
		return e
	}
	return
}
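
// The sketch below shows how a caller might drive ImportDir. It is illustrative only:
// the data path and parallelism are assumptions, not values used by this package, and
// an example like this would normally live in a separate _test.go file.
func ExampleImportDir() {
	// Import every file under the (hypothetical) directory /data/cgimport,
	// reading up to 8 files concurrently.
	filescount, recordscount, err := ImportDir("/data/cgimport", 8)
	if err != nil {
		fmt.Println("import failed:", err)
		return
	}
	fmt.Println("imported", recordscount, "records from", filescount, "files")
}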