Compare commits

...

10 Commits

Author SHA1 Message Date
nuknal
48c8e98c20 偶发的错误帧不终止处理,直接跳过 2025-04-18 14:38:22 +08:00
nuknal
04a841f0c3 三江源一号在 AOS 帧的数据域最后 2 字节为填充字节 2024-10-11 11:57:07 +08:00
nuknal
2b8d4f933d . 2024-10-11 11:25:16 +08:00
nuknal
839a8648f5 adjust log directory 2024-07-18 10:47:38 +08:00
nuknal
02c042d8c4 only store valid pixels 2024-05-28 22:35:55 +08:00
nuknal
357b62422e 去掉stdout 日志颜色,以便executor只获取到日志正文 2024-05-22 18:15:40 +08:00
nuknal
eb6e0bc95a xml report 2024-05-22 11:31:08 +08:00
nuknal
0634b0106e xml params 2024-05-22 10:33:22 +08:00
nuknal
faf285775f xlsx 2024-05-21 18:05:10 +08:00
nuknal
b60095e5b9 xlsx 2024-05-21 17:56:15 +08:00
16 changed files with 347 additions and 141 deletions

View File

@@ -9,3 +9,6 @@ clean:
linux:
GOOS=linux GOARCH=amd64 go build -o bin/sjy01-preprocessing -ldflags=${COMPILE_LDFLAGS} *.go
release:
docker run --rm -v .:/src -v /Users/lan/workspace/sjy01/build/go:/build/go nuknal/gdal38-cv49-builder sh -c "cd /src && make linux"

View File

@@ -3,14 +3,16 @@ package cmd
import (
"fmt"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"starwiz.cn/sjy01/preprocessing/extract"
)
var (
dataId string
batch bool
output string
dataId string
batch bool
output string
paramsXML string
)
var extractCmd = &cobra.Command{
@@ -18,6 +20,44 @@ var extractCmd = &cobra.Command{
Short: "Extract data from raw data files",
Long: `Extract data from raw data files`,
Run: func(cmd *cobra.Command, args []string) {
if paramsXML != "" {
params, err := extract.LoadL0Params(paramsXML)
if err != nil {
panic(err)
}
log.Println("input data:", params.InputData)
log.Println("output path:", params.OutputPath)
log.Println("temp path:", params.TempPath)
log.Println("data id:", params.DataId)
log.Println("satellite:", params.Satellite)
e := extract.NewExtractor(params)
defer e.Cleanup()
aos, err := e.ExtractAosData()
if err != nil {
panic(err)
}
dats, err := e.ExtractOriginalImageData(aos)
if err != nil {
panic(err)
}
if len(dats) == 0 {
panic(fmt.Errorf("no segment data found"))
}
for i, d := range dats {
e.SeprateAuxAndImgData(d, i)
}
if err := e.Report(); err != nil {
panic(err)
}
return
}
if batch {
ps := params()
@@ -28,6 +68,7 @@ var extractCmd = &cobra.Command{
for i, d := range dats {
e.SeprateAuxAndImgData(d, i)
}
e.Cleanup()
}
} else {
p := &extract.Params{
@@ -43,6 +84,7 @@ var extractCmd = &cobra.Command{
for i, d := range dats {
e.SeprateAuxAndImgData(d, i)
}
e.Cleanup()
}
},
}
@@ -53,12 +95,13 @@ func init() {
extractCmd.Flags().StringVarP(&dataId, "data-id", "d", "051622", "051622")
extractCmd.Flags().BoolVarP(&batch, "batch", "b", false, "true | false")
extractCmd.Flags().StringVarP(&output, "out", "o", "demo/output", "demo/output")
extractCmd.Flags().StringVarP(&paramsXML, "params", "x", "", "parameters file path")
}
func params() []*extract.Params {
var params []*extract.Params
datas := []string{"051622", "051712", "051721",
"051813", "051821", "051823",
"051814", "051821", "051823",
"051921", "051922", "052022"}
for _, d := range datas {
params = append(params, &extract.Params{

View File

@@ -21,7 +21,7 @@ var parseCmd = &cobra.Command{
}
e := extract.NewExtractor(&params)
// p.ParseAuxPlatformWithHead("demo/ref/辅助数据.dat")
err := e.ExtractAux("demo/output/051622/SJY01_PMS_20240516_101236_051622_096.AUX",
_, _, _, err := e.ExtractAux("demo/output/051622/SJY01_PMS_20240516_101236_051622_096.AUX",
"demo/temp/1.xlsx")
if err != nil {

View File

@@ -6,10 +6,6 @@ import (
"github.com/spf13/cobra"
)
var (
paramsXML string
)
var rootCmd = &cobra.Command{
Use: "SJY01 Preprocessing",
Short: "Preprocessing tools for SJY01 original raw data",
@@ -20,7 +16,7 @@ var rootCmd = &cobra.Command{
}
func init() {
rootCmd.PersistentFlags().StringVarP(&paramsXML, "params", "p", "params.xml", "parameters file path")
// rootCmd.PersistentFlags().StringVarP(&paramsXML, "params", "p", "params.xml", "parameters file path")
}
func Execute() error {

View File

@@ -66,6 +66,7 @@ func (p *Extractor) ExtractAosData() (string, error) {
errFrameCnt++
}
// 三江源一号在 AOS 帧的数据域最后 2 字节为填充字节
wData.Write(rawData[i+10 : i+894])
i += 1024
validFrameCnt++

View File

@@ -12,15 +12,16 @@ var (
ReferenceTime2000 = 946728000
)
func (e Extractor) ExtractAux(auxfile, xlsxfile string) error {
func (e *Extractor) ExtractAux(auxfile, xlsxfile string) ([]*AuxFrameHead, []*AuxFocalBox, []*AuxPlatform, error) {
log.Println("extract aux data from", auxfile, "to", xlsxfile)
os.Remove(xlsxfile)
if err := createAuxXlsx(xlsxfile); err != nil {
return err
return nil, nil, nil, err
}
f, err := excelize.OpenFile(xlsxfile)
if err != nil {
return err
return nil, nil, nil, err
}
defer f.Close()
@@ -28,26 +29,37 @@ func (e Extractor) ExtractAux(auxfile, xlsxfile string) error {
data, err := os.ReadFile(auxfile)
if err != nil {
log.Println("read aux data from", auxfile, "error:", err.Error())
return err
return nil, nil, nil, err
}
var afh []*AuxFrameHead
var afb []*AuxFocalBox
var aps []*AuxPlatform
row := 2
col := 1
for i := 0; i < len(data); i += 24 + 128 + 512 {
if i+24+128+512 > len(data) {
break
}
var head AuxFrameHead
head.Decode(data[i : i+24])
l0, _ := head.SaveXlsx(f, col, row)
afh = append(afh, &head)
var box AuxFocalBox
box.Decode(data[i+24 : i+24+128])
l1, _ := box.SaveXlsx(f, col+l0, row)
afb = append(afb, &box)
var plat AuxPlatform
plat.Decode(data[i+24+128 : i+24+128+512])
plat.SaveXlsx(f, col+l0+l1, row)
aps = append(aps, &plat)
row++
}
return f.Save()
err = f.Save()
return afh, afb, aps, err
}

View File

@@ -140,7 +140,7 @@ func (ab AuxFocalBox) SaveXlsx(f *excelize.File, col, row int) (int, error) {
ab.TrainingDone,
ab.WorkMode,
ab.IntegralDirection,
ab.PGAGain,
ab.PGAGainValue(),
ab.PIntegrationLevel,
ab.B1IntegrationLevel,
ab.B2IntegrationLevel,

View File

@@ -737,70 +737,6 @@ func (e *Extractor) ParseAuxPlatform(auxfile string) ([]*AuxPlatform, error) {
return aps, nil
}
// ParseAuxPlatformWithHead is a debugging/inspection helper: it scans auxfile
// for auxiliary frames introduced by the sync word 0xD15BD15B, decodes each
// frame head, and dumps timing fields of every 16th frame to a fixed text
// file under demo/temp for manual comparison against reference data.
//
// NOTE(review): aps is never appended to, so the returned slice is always
// nil — callers only get the side-effect text file. Confirm whether this is
// intentional.
// NOTE(review): the sync-word check reads data[i+1..i+3] while the loop only
// guarantees i < len(data); this can index past the end near EOF — confirm
// the input framing makes that unreachable.
func (e *Extractor) ParseAuxPlatformWithHead(auxfile string) ([]*AuxPlatform, error) {
	data, err := os.ReadFile(auxfile)
	if err != nil {
		log.Println("read aux data from", auxfile, "error:", err.Error())
		return nil, err
	}
	// Side-effect dump for manual inspection; create/write errors are
	// deliberately ignored.
	fimg, _ := os.Create("demo/temp/ref_051622_aux_img.txt")
	defer fimg.Close()
	fimg.WriteString("index 流水号 文件号 时间秒 秒小数 utcTime\n")
	var aps []*AuxPlatform
	rows := 0
	for i := 0; i < len(data); {
		// Resynchronize on the frame sync word, advancing one byte at a time
		// until it is found.
		if data[i] == 0xD1 && data[i+1] == 0x5B && data[i+2] == 0xD1 && data[i+3] == 0x5B {
			log.Debug("find package head: 0xD15BD15B")
		} else {
			i++
			// log.Println(i,"not find package head: 0xD15BD15B, skip 1 byte")
			continue
		}
		afh := &AuxFrameHead{}
		afh.Decode(data[i : i+24])
		if !afh.IsValidFrmHead {
			log.Debugf("[%d] invalid frame head of original raw data %v", i, afh.FrmHead)
			i += 1
			continue
		}
		// Only every 16th frame (serial numbers 1, 17, 33, ...) carries the
		// fields dumped below.
		if (afh.SerialNo-1)%16 == 0 {
			ab := &AuxFocalBox{}
			ab.Decode(data[i+24 : i+32])
			fmt.Println(ab.String())
			utcTime := binary.BigEndian.Uint32(data[i+32 : i+36])
			// t := time.Unix(int64(afh.TimeSec+uint32(ReferenceTime2000)), int64(afh.TimeSecFrac)*1000)
			taux := time.Unix(int64(utcTime+uint32(ReferenceTime2000)), 0)
			fimg.WriteString(
				fmt.Sprintf("%d %d %d %d %d %d %s\n",
					i,
					afh.SerialNo,
					afh.FileNo,
					afh.TimeSec,
					afh.TimeSecFrac,
					utcTime,
					taux.String(),
				))
			rows++
			// if rows > 32 {
			// 	break
			// }
		}
		i += 64
	}
	return aps, nil
}
func Time2000UTCSec() int64 {
t, _ := time.ParseInLocation("2006-01-02 15:04:05", "2000-01-01 12:00:00", time.UTC)
return t.Unix()

View File

@@ -1,5 +1,11 @@
package extract
import (
"encoding/xml"
"fmt"
"os"
)
type Params struct {
InputData string
OutputPath string
@@ -12,5 +18,86 @@ type Params struct {
Station string
}
// InputFileList maps the <inputFileList> element of an L0 task XML file.
type InputFileList struct {
	InputData string `xml:"inputdata"`
}

// OutputFileList maps the <outputfilelist> element of an L0 task XML file.
type OutputFileList struct {
	Num        int    `xml:"num,attr"`
	OutputPath string `xml:"outputPath"`
	TempPath   string `xml:"tempPath"`
	ReportFile string `xml:"reportFile"`
	ResultFile string `xml:"resultFile"`
}

// XMLParams maps the <params> element of an L0 task XML file.
type XMLParams struct {
	StationId   string `xml:"stationId"`
	Sensor      string `xml:"sensor"`
	DataId      string `xml:"dataId"`
	SatelliteId string `xml:"satelliteId"`
}

// L0Task maps the root <task> element of an L0 task XML file.
type L0Task struct {
	XMLName        xml.Name       `xml:"task"`
	InputFileList  InputFileList  `xml:"inputFileList"`
	OutputFileList OutputFileList `xml:"outputfilelist"`
	Params         XMLParams      `xml:"params"`
}

// L0_ID is the naming template for L0 product identifiers.
const L0_ID = `{{.Satellite}}_{{.Sensor}}_{{.YYMMDD}}_{{.HHMMSS}}_{{.DataId}}_{{.Index}}`

// L0TPLFile is the on-disk location of the L0 task XML template.
const L0TPLFile = "resource/template/satellite/sjy01.l0.xml.tpl"

// L0TPL is the built-in L0 task XML template.
// NOTE(review): the template emits <TempPath> while the struct tag above
// expects <tempPath>; encoding/xml matches element names case-sensitively,
// so TempPath from a file generated by this template may parse as empty
// (LoadL0Params then falls back to its default) — confirm.
const L0TPL = `<?xml version="1.0" encoding="UTF-8"?>
<task>
<inputFileList>
<inputdata>{{.InputData}}</inputdata>
</inputFileList>
<outputfilelist>
<outputPath>{{.OutputPath}}</outputPath>
<TempPath>{{.TempPath}}</TempPath>
<reportFile>{{.ReportFile}}</reportFile>
<resultFile>{{.ResultFile}}</resultFile>
</outputfilelist>
<params>
<stationId>QH</stationId>
<sensor>PMS</sensor>
<dataId>{{.DataId}}</dataId>
<satelliteId>SJY01</satelliteId>
</params>
</task>
`
// LoadL0Params reads an L0 task description XML file (see L0Task) and
// converts it into the internal Params used by the extractor.
//
// TempPath falls back to "/tmp/SJY01/data" when the task file leaves it
// empty; LogFile is always derived from the data id.
func LoadL0Params(fname string) (*Params, error) {
	data, err := os.ReadFile(fname)
	if err != nil {
		return nil, fmt.Errorf("reading task file %q: %w", fname, err)
	}
	var task L0Task
	if err := xml.Unmarshal(data, &task); err != nil {
		return nil, fmt.Errorf("parsing task file %q: %w", fname, err)
	}
	params := Params{
		InputData:  task.InputFileList.InputData,
		OutputPath: task.OutputFileList.OutputPath,
		TempPath:   task.OutputFileList.TempPath,
		Report:     task.OutputFileList.ReportFile,
		Result:     task.OutputFileList.ResultFile,
		DataId:     task.Params.DataId,
		Satellite:  task.Params.SatelliteId,
		Station:    task.Params.StationId,
		LogFile:    fmt.Sprintf("/tmp/SJY01/log/%s_l0.log", task.Params.DataId),
	}
	if params.TempPath == "" {
		// Default scratch location when the task file does not provide one.
		params.TempPath = "/tmp/SJY01/data"
	}
	return &params, nil
}

View File

@@ -2,17 +2,29 @@ package extract
import (
"os"
"path/filepath"
"sync"
)
type Extractor struct {
params *Params
Clean bool
mutex sync.RWMutex
report *Report
}
func NewExtractor(params *Params) *Extractor {
os.MkdirAll(params.OutputPath, 0755)
os.MkdirAll(params.TempPath, 0755)
return &Extractor{params: params}
if err := os.MkdirAll(params.OutputPath, 0755); err != nil {
panic(err)
}
if err := os.MkdirAll(params.TempPath, 0755); err != nil {
panic(err)
}
if err := os.MkdirAll(filepath.Dir(params.LogFile), 0755); err != nil {
panic(err)
}
return &Extractor{params: params, Clean: true, report: &Report{SegmentDirRoot: params.OutputPath}}
}
func (e *Extractor) Cleanup() error {

46
extract/report.go Normal file
View File

@@ -0,0 +1,46 @@
package extract
import (
"encoding/xml"
"os"
"path/filepath"
)
// Report is the root document written by Extractor.Report, describing all
// extracted segments.
type Report struct {
	XMLName        xml.Name  `xml:"report"`
	SegmentDirRoot string    `xml:"segmentDirRoot"`
	Segments       []Segment `xml:"segments>segment"`
}

// XMLTime is a timestamp element carrying its time zone as an attribute.
type XMLTime struct {
	TimeZone string `xml:"timeZone,attr"`
	Value    string `xml:",chardata"`
}

// Segment describes one extracted data segment and the files produced for it.
type Segment struct {
	Id        string  `xml:"segmentId"` // segment identifier
	Aux       string  `xml:"aux"`       // auxiliary data file path
	Pan       string  `xml:"pan"`       // panchromatic raw image file path
	Mss       string  `xml:"mss"`       // multispectral raw image file path
	StartTime XMLTime `xml:"startTime"`
	EndTime   XMLTime `xml:"endTime"`
	Meta      string  `xml:"meta"`
}
// Report serializes the collected segment report to e.params.Report as
// indented XML, creating the target directory first if needed.
func (e *Extractor) Report() error {
	// Bug fix: the MkdirAll error was silently dropped; a failure here would
	// otherwise surface only as a confusing os.Create error below.
	if err := os.MkdirAll(filepath.Dir(e.params.Report), 0755); err != nil {
		return err
	}
	f, err := os.Create(e.params.Report)
	if err != nil {
		return err
	}
	defer f.Close()
	data, err := xml.MarshalIndent(e.report, " ", " ")
	if err != nil {
		return err
	}
	_, err = f.Write(data)
	return err
}

View File

@@ -6,10 +6,10 @@ import (
"fmt"
"math"
"os"
"path/filepath"
"strings"
"time"
"github.com/sirupsen/logrus"
log "github.com/sirupsen/logrus"
)
@@ -50,6 +50,7 @@ func (e *Extractor) SeprateAuxAndImgData(dataFile string, segmentIndex int) erro
var afh AuxFrameHead
pandata := make([]byte, 0)
msdata := make([][]byte, 4)
var header []byte
var ebAux []byte
@@ -57,7 +58,7 @@ func (e *Extractor) SeprateAuxAndImgData(dataFile string, segmentIndex int) erro
for i := 0; i < dataLen; {
if i+4 > dataLen {
logrus.Println("end of data, dataLen:", dataLen, "i:", i)
log.Println("end of data, dataLen:", dataLen, "i:", i)
break
}
@@ -107,19 +108,22 @@ func (e *Extractor) SeprateAuxAndImgData(dataFile string, segmentIndex int) erro
platAux = append(platAux, data[dataIndex:dataIndex+32]...)
dataIndex += 32
if !afh.B0 || !afh.B1 || !afh.B2 || !afh.B3 || !afh.B4 {
log.Error("not all bands are available")
break
}
// 存储图像数据到临时文件 - 以 ENVI BSQ 格式存储,同时提供 HDR 描述文件
if afh.B0 {
// wpan.Write(data[dataIndex : dataIndex+19040])
write16bPixelLittleEndian(lw.ws[PAN_RAW].w, data[dataIndex:dataIndex+19040])
pandata = append(pandata, data[dataIndex:dataIndex+19040]...)
dataIndex += 19040
panEnviHdr.Lines += 1
}
if afh.B1 {
// write16bPixelLittleEndian(wmss, data[dataIndex:dataIndex+1192])
msdata[0] = append(msdata[0], data[dataIndex:dataIndex+1192]...)
dataIndex += 1192
mssEnviHdr.Lines += 1
}
if afh.B2 {
// write16bPixelLittleEndian(wmss, data[dataIndex:dataIndex+1192])
@@ -139,53 +143,12 @@ func (e *Extractor) SeprateAuxAndImgData(dataFile string, segmentIndex int) erro
i = dataIndex // 完成一行数据解析
}
// var bands = 0
// for i := 0; i < 4; i++ {
// if len(msdata[i]) > 0 {
// log.Println("write mss data of band B", i+1)
// _, err := write16bPixelLittleEndian(lw.ws[MSS_RAW].w, msdata[i])
// if err != nil {
// log.Error("write mss data error:", err.Error())
// }
// bands += 1
// }
// }
if len(msdata[0]) != len(msdata[1]) || len(msdata[2]) != len(msdata[3]) {
log.Error("mss data of bands B1-B4 are not equal")
return errors.New("mss data of bands B1-B4 are not equal")
}
mssRowLen := 1192 * 4
for i := 0; i < len(msdata[0]); i += mssRowLen {
var err error
_, err = write16bPixelLittleEndian(lw.ws[MSS_RAW].w, msdata[0][i:i+mssRowLen])
if err != nil {
log.Error("write mss 1 data error:", err.Error())
}
_, err = write16bPixelLittleEndian(lw.ws[MSS_RAW].w, msdata[1][i:i+mssRowLen])
if err != nil {
log.Error("write mss 2 data error:", err.Error())
}
_, err = write16bPixelLittleEndian(lw.ws[MSS_RAW].w, msdata[2][i:i+mssRowLen])
if err != nil {
log.Error("write mss 3 data error:", err.Error())
}
_, err = write16bPixelLittleEndian(lw.ws[MSS_RAW].w, msdata[3][i:i+mssRowLen])
if err != nil {
log.Error("write mss 4 data error:", err.Error())
}
}
panEnviHdr.Samples = 9520
panEnviHdr.Bands = 1
lw.ws[PAN_HDR].w.Write([]byte(panEnviHdr.String()))
mssEnviHdr.Lines = mssEnviHdr.Lines / 4 // 多光谱波段分别在 4 行中传输
mssEnviHdr.Samples = 2384 * 4
mssEnviHdr.Bands = 1
lw.ws[MSS_HDR].w.Write([]byte(mssEnviHdr.String()))
// 先判断辅助数据是否完整
// 帧头+辅助数据
if len(header)/24 < len(ebAux)/128 || len(ebAux)/128 != len(platAux)/512 {
fmt.Println("aux data length:", len(header)/24, len(ebAux)/128, len(platAux)/512)
@@ -199,6 +162,87 @@ func (e *Extractor) SeprateAuxAndImgData(dataFile string, segmentIndex int) erro
lw.ws[AUX].w.Write(platAux[i*512 : (i+1)*512])
}
_, _, aps, err := e.ExtractAux(lw.ws[AUX].name, lw.ws[AUX].name+".xlsx")
if err != nil {
return err
}
auxHeight := len(aps) * 16 // 16行图像数据为一组辅助数据
panHeight := len(pandata) / 19040
mssHeight := 4 * len(msdata[0]) / 4768
log.Println("pan height:", panHeight, "mss height:", mssHeight, "aux height:", auxHeight)
// 取最小值作为有效数据长度
efficientHeight := int(math.Min(float64(panHeight), float64(mssHeight)))
efficientHeight = int(math.Min(float64(efficientHeight), float64(auxHeight)))
log.Println("efficient height:", efficientHeight)
// 写入pan数据
for i := 0; i < len(pandata); i += 19040 {
start := i + (2+48+38)*2
end := start + 18688
write16bPixelLittleEndian(lw.ws[PAN_RAW].w, pandata[start:end])
panEnviHdr.Lines += 1
if panEnviHdr.Lines >= efficientHeight {
break
}
}
mssRowLen := 1192 * 4
for i := 0; i < len(msdata[0]); i += mssRowLen {
var err error
start := i + (1+11+10)*2
end := start + 4672
_, err = write16bPixelLittleEndian(lw.ws[MSS_RAW].w, msdata[0][start:end])
if err != nil {
log.Error("write mss 1 data error:", err.Error())
}
_, err = write16bPixelLittleEndian(lw.ws[MSS_RAW].w, msdata[1][start:end])
if err != nil {
log.Error("write mss 2 data error:", err.Error())
}
_, err = write16bPixelLittleEndian(lw.ws[MSS_RAW].w, msdata[2][start:end])
if err != nil {
log.Error("write mss 3 data error:", err.Error())
}
_, err = write16bPixelLittleEndian(lw.ws[MSS_RAW].w, msdata[3][start:end])
if err != nil {
log.Error("write mss 4 data error:", err.Error())
}
mssEnviHdr.Lines += 1
if mssEnviHdr.Lines*4 >= efficientHeight {
break
}
}
panEnviHdr.Samples = 9344
panEnviHdr.Bands = 1
lw.ws[PAN_HDR].w.Write([]byte(panEnviHdr.String()))
mssEnviHdr.Samples = 2336 * 4
mssEnviHdr.Bands = 1
lw.ws[MSS_HDR].w.Write([]byte(mssEnviHdr.String()))
fmt.Println("pan height:", panEnviHdr.Lines, "mss height:", mssEnviHdr.Lines)
e.mutex.Lock()
defer e.mutex.Unlock()
seg := Segment{
Pan: lw.ws[PAN_RAW].name,
Mss: lw.ws[MSS_RAW].name,
Aux: lw.ws[AUX].name,
Id: strings.Split(filepath.Base(lw.ws[AUX].name), ".")[0],
StartTime: XMLTime{
TimeZone: "UTC",
Value: time.Unix(int64(aps[0].UTCTimeSec)+int64(ReferenceTime2000), int64(aps[0].Microsecond)*1000).UTC().Format(time.RFC3339),
},
EndTime: XMLTime{
TimeZone: "UTC",
Value: time.Unix(int64(aps[len(aps)-1].UTCTimeSec)+int64(ReferenceTime2000), int64(aps[len(aps)-1].Microsecond)*1000).UTC().Format(time.RFC3339),
},
}
e.report.Segments = append(e.report.Segments, seg)
return nil
}
@@ -287,3 +331,25 @@ func (e *Extractor) trimImgRawData(data []byte) ([]byte, *AuxFrameHead) {
return data[start:end], afh
}
// PanEffectivePixel returns the byte range [start, end) of the effective
// pixels within one PAN row of 9520 16-bit pixels (19040 bytes).
//
// Row layout in pixels:
//
//	9520 = 2(Margin) + 48(OB) + 38(Margin) + 9344(Effective) + 38(Margin) + 48(OB) + 2(Margin)
//
// (For reference, a 7056-pixel line is 2 + 12 + 18 + 20 + 7000 + 4.)
// Returns (-1, -1) when the row is shorter than a full PAN line.
func PanEffectivePixel(row []byte) (start, end int) {
	if len(row) < 19040 {
		return -1, -1
	}
	start = (0 + 2 + 48 + 38) * 2
	// Bug fix: the trailing margin mirrors the leading one (2+48+38, not
	// 2+48+28); with 28 the reported range was 10 pixels (20 bytes) too wide
	// and disagreed with the 18688-byte (9344-pixel) effective width used
	// elsewhere in the extractor.
	end = 19040 - (2+48+38)*2
	return start, end
}
// MssEffectivePixel returns the byte range [start, end) of the effective
// pixels within one MSS row of 2384 16-bit pixels (4768 bytes).
//
// Row layout in pixels: 2384 = 1 + 11 + 10 + 2336(Effective) + 10 + 8 + 8.
// Returns (-1, -1) when the row is shorter than a full MSS line.
func MssEffectivePixel(row []byte) (start, end int) {
	const (
		rowBytes   = 4768        // 2384 pixels x 2 bytes
		leadPixels = 1 + 11 + 10 // pixels before the effective region
		tailPixels = 10 + 8 + 8  // pixels after the effective region
	)
	if len(row) < rowBytes {
		return -1, -1
	}
	return leadPixels * 2, rowBytes - tailPixels*2
}

View File

@@ -59,9 +59,6 @@ func (p *Extractor) ExtractOriginalImageData(aosDataFile string) ([]string, erro
snRange := map[int][]uint32{}
datSet := map[int][]byte{}
fsn, _ := os.Create("demo/temp/tf_sn.txt")
defer fsn.Close()
var i int
for i < len(aosData) {
if i+4 > len(aosData) {
@@ -79,11 +76,9 @@ func (p *Extractor) ExtractOriginalImageData(aosDataFile string) ([]string, erro
tf.Decode(aosData[i : i+TransImageFrameLength])
fileno := int(tf.FileNo)
snRange[fileno] = append(snRange[fileno], tf.SNo)
fsn.WriteString(fmt.Sprintf("%d %d %d\n", i, tf.SNo, fileno))
if tf.SecretFlag == 0xAA {
log.Info("secret frame is not supported")
break
// break
}
// 只保留非空帧
@@ -100,6 +95,9 @@ func (p *Extractor) ExtractOriginalImageData(aosDataFile string) ([]string, erro
}
log.Println("null transfer frame cnt:", nullFrmCnt)
if len(snRange) == 0 {
log.Errorf("no valid transfer frame found in %s", aosDataFile)
}
for k, v := range snRange {
vv := slice.Unique(v)

5
go.mod
View File

@@ -2,7 +2,10 @@ module starwiz.cn/sjy01/preprocessing
go 1.20
require github.com/xuri/excelize/v2 v2.8.1
require (
github.com/xuri/excelize/v2 v2.8.1
gocv.io/x/gocv v0.36.1
)
require (
github.com/jonboulle/clockwork v0.4.0 // indirect

2
go.sum
View File

@@ -84,6 +84,8 @@ github.com/xuri/excelize/v2 v2.8.1/go.mod h1:oli1E4C3Pa5RXg1TBXn4ENCXDV5JUMlBluU
github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05 h1:qhbILQo1K3mphbwKh1vNm4oGezE1eF9fQWmNiIpSfI4=
github.com/xuri/nfp v0.0.0-20230919160717-d98342af3f05/go.mod h1:WwHg+CVyzlv/TX9xqBFXEZAuxOPxn2k1GNHwG41IIUQ=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
gocv.io/x/gocv v0.36.1 h1:6XkEaPOk7h/umjy+MXgSEtSeCIgcPJhccUjrJFhjdTY=
gocv.io/x/gocv v0.36.1/go.mod h1:lmS802zoQmnNvXETpmGriBqWrENPei2GxYx5KUxJsMA=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=

View File

@@ -38,8 +38,8 @@ func init() {
FullTimestamp: true,
TimestampFormat: "2006-01-02.15:04:05",
ForceFormatting: true,
ForceColors: true,
DisableColors: false,
ForceColors: false,
DisableColors: true,
}
fileFormatter = &prefixed.TextFormatter{
FullTimestamp: true,
@@ -49,7 +49,8 @@ func init() {
DisableColors: true,
}
configureLogger(logrus.StandardLogger(), "log/app.log", logrus.InfoLevel)
os.MkdirAll("log/SJY01ImageProc", 0755)
configureLogger(logrus.StandardLogger(), "log/SJY01ImageProc/preproc.log", logrus.InfoLevel)
}
func NewLogger(logfile string) *logrus.Logger {
@@ -65,7 +66,7 @@ func configureLogger(logger *logrus.Logger, logfile string, level logrus.Level)
writer, _ := rotatelogs.New(
logfile+".%Y%m%d",
rotatelogs.WithLinkName(logfile),
rotatelogs.WithMaxAge(time.Duration(30*24)*time.Hour),
rotatelogs.WithMaxAge(time.Duration(7*24)*time.Hour),
rotatelogs.WithRotationTime(time.Duration(24)*time.Hour),
)