package main

import (
	"database/sql"
	"fmt"
	"time"

	"github.com/astaxie/beego/logs"
)

// Package-level cache of plan_file rows, maintained by sync_file_cache.
var (
	file_cache_data map[string]plan_file // cache keyed by P_id + Plan_name
	file_cache_time string               // newest [op_time] merged so far; text compare works for "2006-01-02 15:04:05" format
	file_ch         chan int             // a send on this channel requests an incremental refresh
)

func init() {
	file_cache_data = make(map[string]plan_file)
	file_cache_time = "2023-01-01 00:00:00"
	file_ch = make(chan int, 100)
}

// load_file_rows executes the given query against sqlConn and merges every
// returned row into file_cache_data, advancing file_cache_time to the newest
// [op_time] value seen. Scan or query errors are logged and skipped so one
// bad row does not abort the rest of the batch.
func load_file_rows(sqlstr string) {
	fmt.Println(sqlstr)
	rdRow, err := sqlConn.Query(sqlstr)
	if err != nil {
		logs.Error(fmt.Sprintf("sync_file_cache Query err:%v", err.Error()))
		return
	}
	// Always release the result set, even if iteration stops early.
	defer rdRow.Close()
	for rdRow.Next() {
		var d plan_file
		var op_t string
		if err := rdRow.Scan(&d.ID, &d.P_id, &d.P_name, &d.Plan_name, &d.Plan_process_file, &d.Filing_time, &d.Project_file, &d.P_filing_time, &op_t); err != nil {
			logs.Error("sync_file_cache scan Error", err.Error())
			continue
		}
		// NOTE(review): file_cache_data is written here without any lock; if
		// other goroutines read this map concurrently that is a data race —
		// confirm against the readers and add a mutex if so.
		file_cache_data[d.P_id+d.Plan_name] = d
		if op_t > file_cache_time {
			file_cache_time = op_t
		}
	}
	// Surface errors that terminated the iteration (e.g. a dropped connection).
	if err := rdRow.Err(); err != nil {
		logs.Error("sync_file_cache rows Error", err.Error())
	}
}

// sync_file_cache performs one full load of the [plan_file] table into
// file_cache_data, then loops forever doing an incremental reload (rows with
// [op_time] newer than file_cache_time) each time a value arrives on file_ch.
// It is intended to run as a long-lived goroutine and never returns.
func sync_file_cache() {
	const baseQuery = "select [ID],[p_id],[p_name],[plan_name],[process_file],[filing_time],[project_file],[p_filing_time],[op_time] from [plan_file]"

	// Initial full load.
	load_file_rows(baseQuery + " order by ID asc")

	// Block until a refresh is requested instead of polling with a sleep;
	// buffered sends queued on file_ch are drained one request at a time.
	for range file_ch {
		// file_cache_time is produced by the database itself, never by user
		// input, so formatting it into the query is not an injection vector.
		load_file_rows(fmt.Sprintf(baseQuery+" where [op_time]>'%s' order by ID asc", file_cache_time))
	}
}