diff --git a/_example/import/main.go b/_example/import/main.go
index 0989902..8baf2fc 100644
--- a/_example/import/main.go
+++ b/_example/import/main.go
@@ -9,7 +9,7 @@ import (
type importer struct {
}
-func (f *importer) Import(db *trdsql.DDB, query string) (string, error) {
+func (f *importer) Import(db *trdsql.DB, query string) (string, error) {
err := db.CreateTable("test", []string{"a"}, []string{"text"})
if err != nil {
return query, err
diff --git a/_example/simple/main.go b/_example/simple/main.go
index 4b83111..1f64624 100644
--- a/_example/simple/main.go
+++ b/_example/simple/main.go
@@ -11,7 +11,7 @@ type ArrayTable struct {
table [][]string
}
-func (a *ArrayTable) First(columns []string, types []string) error {
+func (a *ArrayTable) PreWrite(columns []string, types []string) error {
a.table = make([][]string, 0)
fmt.Println(columns, types)
return nil
@@ -24,7 +24,7 @@ func (a *ArrayTable) WriteRow(values []interface{}, columns []string) error {
a.table = append(a.table, row)
return nil
}
-func (a *ArrayTable) Last() error {
+func (a *ArrayTable) PostWrite() error {
return nil
}
diff --git a/exporter.go b/exporter.go
index 3b2fcf3..6081232 100644
--- a/exporter.go
+++ b/exporter.go
@@ -76,7 +76,7 @@ func (e *exporter) Export(db *DB, query string) error {
types[i] = ct.DatabaseTypeName()
}
- err = e.Writer.First(columns, types)
+ err = e.Writer.PreWrite(columns, types)
if err != nil {
return err
}
@@ -90,7 +90,7 @@ func (e *exporter) Export(db *DB, query string) error {
return err
}
}
- return e.Writer.Last()
+ return e.Writer.PostWrite()
}
func ConvertTypes(dbTypes []string) []string {
@@ -139,33 +139,3 @@ func ValString(v interface{}) string {
}
return str
}
-
-// Writer is file format writer
-type Writer interface {
- First([]string, []string) error
- WriteRow([]interface{}, []string) error
- Last() error
-}
-
-func NewWriter(writeOpts WriteOpts) Writer {
- switch writeOpts.OutFormat {
- case LTSV:
- return NewLTSVWrite(writeOpts)
- case JSON:
- return NewJSONWrite(writeOpts)
- case RAW:
- return NewRAWWrite(writeOpts)
- case MD:
- return NewTWWrite(writeOpts, true)
- case AT:
- return NewTWWrite(writeOpts, false)
- case VF:
- return NewVFWrite(writeOpts)
- case TBLN:
- return NewTBLNWrite(writeOpts)
- case CSV:
- return NewCSVWrite(writeOpts)
- default:
- return NewCSVWrite(writeOpts)
- }
-}
diff --git a/input_csv.go b/input_csv.go
index da1a4b5..201a57d 100644
--- a/input_csv.go
+++ b/input_csv.go
@@ -17,33 +17,33 @@ type CSVRead struct {
inHeader bool
}
-func NewCSVReader(r io.Reader, opts ReadOpts) (Reader, error) {
+func NewCSVReader(reader io.Reader, opts ReadOpts) (Reader, error) {
var err error
if opts.InHeader {
opts.InPreRead--
}
- cr := &CSVRead{}
- cr.reader = csv.NewReader(r)
- cr.reader.LazyQuotes = true
- cr.reader.FieldsPerRecord = -1 // no check count
- cr.reader.TrimLeadingSpace = true
- cr.inHeader = opts.InHeader
- cr.reader.Comma, err = delimiter(opts.InDelimiter)
+ r := &CSVRead{}
+ r.reader = csv.NewReader(reader)
+ r.reader.LazyQuotes = true
+ r.reader.FieldsPerRecord = -1 // no check count
+ r.reader.TrimLeadingSpace = true
+ r.inHeader = opts.InHeader
+ r.reader.Comma, err = delimiter(opts.InDelimiter)
if opts.InSkip > 0 {
skip := make([]interface{}, 1)
for i := 0; i < opts.InSkip; i++ {
- r, e := cr.ReadRow(skip)
- if e != nil {
- log.Printf("ERROR: skip error %s", e)
+ row, err := r.ReadRow(skip)
+ if err != nil {
+ log.Printf("ERROR: skip error %s", err)
break
}
- debug.Printf("Skip row:%s\n", r)
+ debug.Printf("Skip row:%s\n", row)
}
}
- return cr, err
+ return r, err
}
func delimiter(sepString string) (rune, error) {
@@ -60,56 +60,56 @@ func delimiter(sepString string) (rune, error) {
// GetColumn is reads the specified number of rows and determines the column name.
// The previously read row is stored in preRead.
-func (cr *CSVRead) GetColumn(rowNum int) ([]string, error) {
+func (r *CSVRead) GetColumn(rowNum int) ([]string, error) {
// Header
- if cr.inHeader {
- row, err := cr.reader.Read()
+ if r.inHeader {
+ row, err := r.reader.Read()
if err != nil {
return nil, err
}
- cr.names = make([]string, len(row))
+ r.names = make([]string, len(row))
for i, col := range row {
if col == "" {
- cr.names[i] = "c" + strconv.Itoa(i+1)
+ r.names[i] = "c" + strconv.Itoa(i+1)
} else {
- cr.names[i] = col
+ r.names[i] = col
}
}
}
for n := 0; n < rowNum; n++ {
- row, err := cr.reader.Read()
+ row, err := r.reader.Read()
if err != nil {
- return cr.names, err
+ return r.names, err
}
rows := make([]string, len(row))
for i, col := range row {
rows[i] = col
- if len(cr.names) < i+1 {
- cr.names = append(cr.names, "c"+strconv.Itoa(i+1))
+ if len(r.names) < i+1 {
+ r.names = append(r.names, "c"+strconv.Itoa(i+1))
}
}
- cr.preRead = append(cr.preRead, rows)
+ r.preRead = append(r.preRead, rows)
}
- return cr.names, nil
+ return r.names, nil
}
// GetTypes is reads the specified number of rows and determines the column type.
-func (cr *CSVRead) GetTypes() ([]string, error) {
- cr.types = make([]string, len(cr.names))
- for i := 0; i < len(cr.names); i++ {
- cr.types[i] = DefaultDBType
+func (r *CSVRead) GetTypes() ([]string, error) {
+ r.types = make([]string, len(r.names))
+ for i := 0; i < len(r.names); i++ {
+ r.types[i] = DefaultDBType
}
- return cr.types, nil
+ return r.types, nil
}
// PreReadRow is returns only columns that store preread rows.
-func (cr *CSVRead) PreReadRow() [][]interface{} {
- rowNum := len(cr.preRead)
+func (r *CSVRead) PreReadRow() [][]interface{} {
+ rowNum := len(r.preRead)
rows := make([][]interface{}, rowNum)
for n := 0; n < rowNum; n++ {
- rows[n] = make([]interface{}, len(cr.names))
- for i, f := range cr.preRead[n] {
+ rows[n] = make([]interface{}, len(r.names))
+ for i, f := range r.preRead[n] {
rows[n][i] = f
}
}
@@ -117,8 +117,8 @@ func (cr *CSVRead) PreReadRow() [][]interface{} {
}
// ReadRow is read the rest of the row.
-func (cr *CSVRead) ReadRow(row []interface{}) ([]interface{}, error) {
- record, err := cr.reader.Read()
+func (r *CSVRead) ReadRow(row []interface{}) ([]interface{}, error) {
+ record, err := r.reader.Read()
if err != nil {
return row, err
}
diff --git a/input_json.go b/input_json.go
index b0ac9c5..d07162e 100644
--- a/input_json.go
+++ b/input_json.go
@@ -17,10 +17,10 @@ type JSONRead struct {
count int
}
-func NewJSONReader(r io.Reader) (Reader, error) {
- jr := &JSONRead{}
- jr.reader = json.NewDecoder(r)
- return jr, nil
+func NewJSONReader(reader io.Reader) (Reader, error) {
+ r := &JSONRead{}
+ r.reader = json.NewDecoder(reader)
+ return r, nil
}
// Convert JSON to a table.
@@ -30,80 +30,80 @@ func NewJSONReader(r io.Reader) (Reader, error) {
// GetColumn is reads the specified number of rows and determines the column name.
// The previously read row is stored in preRead.
-func (jr *JSONRead) GetColumn(rowNum int) ([]string, error) {
+func (r *JSONRead) GetColumn(rowNum int) ([]string, error) {
var top interface{}
names := map[string]bool{}
for i := 0; i < rowNum; i++ {
- row, keys, err := jr.readAhead(top, i)
+ row, keys, err := r.readAhead(top, i)
if err != nil {
- return jr.names, err
+ return r.names, err
}
- jr.preRead = append(jr.preRead, row)
+ r.preRead = append(r.preRead, row)
for k := 0; k < len(keys); k++ {
if !names[keys[k]] {
names[keys[k]] = true
- jr.names = append(jr.names, keys[k])
+ r.names = append(r.names, keys[k])
}
}
}
- return jr.names, nil
+ return r.names, nil
}
// GetTypes is reads the specified number of rows and determines the column type.
-func (jr *JSONRead) GetTypes() ([]string, error) {
- jr.types = make([]string, len(jr.names))
- for i := 0; i < len(jr.names); i++ {
- jr.types[i] = DefaultDBType
+func (r *JSONRead) GetTypes() ([]string, error) {
+ r.types = make([]string, len(r.names))
+ for i := 0; i < len(r.names); i++ {
+ r.types[i] = DefaultDBType
}
- return jr.types, nil
+ return r.types, nil
}
-func (jr *JSONRead) readAhead(top interface{}, count int) (map[string]string, []string, error) {
- if jr.inArray != nil {
- if len(jr.inArray) > count {
- jr.count++
- return jr.secondLevel(top, jr.inArray[count])
+func (r *JSONRead) readAhead(top interface{}, count int) (map[string]string, []string, error) {
+ if r.inArray != nil {
+ if len(r.inArray) > count {
+ r.count++
+ return r.secondLevel(top, r.inArray[count])
}
return nil, nil, io.EOF
}
- err := jr.reader.Decode(&top)
+ err := r.reader.Decode(&top)
if err != nil {
return nil, nil, err
}
- return jr.topLevel(top)
+ return r.topLevel(top)
}
-func (jr *JSONRead) topLevel(top interface{}) (map[string]string, []string, error) {
+func (r *JSONRead) topLevel(top interface{}) (map[string]string, []string, error) {
switch obj := top.(type) {
case []interface{}: // [{} or [] or etc...]
- jr.inArray = obj
- return jr.secondLevel(top, jr.inArray[0])
+ r.inArray = obj
+ return r.secondLevel(top, r.inArray[0])
case map[string]interface{}: // {"a":"b"} object
- jr.inArray = nil
- return jr.objectFirstRow(obj)
+ r.inArray = nil
+ return r.objectFirstRow(obj)
}
return nil, nil, fmt.Errorf("JSON format could not be converted")
}
// Analyze second when top is array
-func (jr *JSONRead) secondLevel(top interface{}, second interface{}) (map[string]string, []string, error) {
+func (r *JSONRead) secondLevel(top interface{}, second interface{}) (map[string]string, []string, error) {
switch obj := second.(type) {
case map[string]interface{}: // [{}]
- return jr.objectFirstRow(obj)
+ return r.objectFirstRow(obj)
case []interface{}: // [[]]
- return jr.etcFirstRow(second)
+ return r.etcFirstRow(second)
default: // ["a","b"]
- jr.inArray = nil
- return jr.etcFirstRow(top)
+ r.inArray = nil
+ return r.etcFirstRow(top)
}
}
-func (jr *JSONRead) objectFirstRow(obj map[string]interface{}) (map[string]string, []string, error) {
+func (r *JSONRead) objectFirstRow(obj map[string]interface{}) (map[string]string, []string, error) {
// {"a":"b"} object
name := make([]string, 0, len(obj))
row := make(map[string]string)
@@ -114,7 +114,7 @@ func (jr *JSONRead) objectFirstRow(obj map[string]interface{}) (map[string]strin
return row, name, nil
}
-func (jr *JSONRead) etcFirstRow(val interface{}) (map[string]string, []string, error) {
+func (r *JSONRead) etcFirstRow(val interface{}) (map[string]string, []string, error) {
// ex.
// array array
// [["a"],
//  ["b"]]
@@ -140,54 +140,54 @@ func jsonString(val interface{}) string {
}
// PreReadRow is returns only columns that store preread rows.
-func (jr *JSONRead) PreReadRow() [][]interface{} {
- rowNum := len(jr.preRead)
+func (r *JSONRead) PreReadRow() [][]interface{} {
+ rowNum := len(r.preRead)
rows := make([][]interface{}, rowNum)
for n := 0; n < rowNum; n++ {
- rows[n] = make([]interface{}, len(jr.names))
- for i := range jr.names {
- rows[n][i] = jr.preRead[n][jr.names[i]]
+ rows[n] = make([]interface{}, len(r.names))
+ for i := range r.names {
+ rows[n][i] = r.preRead[n][r.names[i]]
}
}
return rows
}
// ReadRow is read the rest of the row.
-func (jr *JSONRead) ReadRow(row []interface{}) ([]interface{}, error) {
- if jr.inArray != nil {
+func (r *JSONRead) ReadRow(row []interface{}) ([]interface{}, error) {
+ if r.inArray != nil {
// [] array
- jr.count++
- if jr.count >= len(jr.inArray) {
+ r.count++
+ if r.count >= len(r.inArray) {
var top interface{}
- err := jr.reader.Decode(&top)
+ err := r.reader.Decode(&top)
if err != nil {
return nil, err
}
- jr.count = 0
+ r.count = 0
}
- if len(jr.inArray) > 0 {
- row = jr.rowParse(row, jr.inArray[jr.count])
+ if len(r.inArray) > 0 {
+ row = r.rowParse(row, r.inArray[r.count])
}
} else {
// {} object
var data interface{}
- err := jr.reader.Decode(&data)
+ err := r.reader.Decode(&data)
if err != nil {
return nil, fmt.Errorf("json format error:%s", err)
}
- row = jr.rowParse(row, data)
+ row = r.rowParse(row, data)
}
return row, nil
}
-func (jr *JSONRead) rowParse(row []interface{}, jsonRow interface{}) []interface{} {
+func (r *JSONRead) rowParse(row []interface{}, jsonRow interface{}) []interface{} {
switch m := jsonRow.(type) {
case map[string]interface{}:
- for i := range jr.names {
- row[i] = jsonString(m[jr.names[i]])
+ for i := range r.names {
+ row[i] = jsonString(m[r.names[i]])
}
default:
- for i := range jr.names {
+ for i := range r.names {
row[i] = nil
}
row[0] = jsonString(jsonRow)
diff --git a/input_ltsv.go b/input_ltsv.go
index b72f7a9..ff6faa9 100644
--- a/input_ltsv.go
+++ b/input_ltsv.go
@@ -17,87 +17,87 @@ type LTSVRead struct {
types []string
}
-func NewLTSVReader(r io.Reader, opts ReadOpts) (Reader, error) {
- lr := &LTSVRead{}
- lr.reader = bufio.NewReader(r)
- lr.delimiter = "\t"
+func NewLTSVReader(reader io.Reader, opts ReadOpts) (Reader, error) {
+ r := &LTSVRead{}
+ r.reader = bufio.NewReader(reader)
+ r.delimiter = "\t"
if opts.InSkip > 0 {
skip := make([]interface{}, 1)
for i := 0; i < opts.InSkip; i++ {
- r, e := lr.ReadRow(skip)
- if e != nil {
- log.Printf("ERROR: skip error %s", e)
+ row, err := r.ReadRow(skip)
+ if err != nil {
+ log.Printf("ERROR: skip error %s", err)
break
}
- debug.Printf("Skip row:%s\n", r)
+ debug.Printf("Skip row:%s\n", row)
}
}
- return lr, nil
+ return r, nil
}
// GetColumn is reads the specified number of rows and determines the column name.
// The previously read row is stored in preRead.
-func (lr *LTSVRead) GetColumn(rowNum int) ([]string, error) {
+func (r *LTSVRead) GetColumn(rowNum int) ([]string, error) {
names := map[string]bool{}
for i := 0; i < rowNum; i++ {
- row, keys, err := lr.read()
+ row, keys, err := r.read()
if err != nil {
- return lr.names, err
+ return r.names, err
}
// Add only unique column names.
for k := 0; k < len(keys); k++ {
if !names[keys[k]] {
names[keys[k]] = true
- lr.names = append(lr.names, keys[k])
+ r.names = append(r.names, keys[k])
}
}
- lr.preRead = append(lr.preRead, row)
+ r.preRead = append(r.preRead, row)
}
- return lr.names, nil
+ return r.names, nil
}
// GetTypes is reads the specified number of rows and determines the column type.
-func (lr *LTSVRead) GetTypes() ([]string, error) {
- lr.types = make([]string, len(lr.names))
- for i := 0; i < len(lr.names); i++ {
- lr.types[i] = DefaultDBType
+func (r *LTSVRead) GetTypes() ([]string, error) {
+ r.types = make([]string, len(r.names))
+ for i := 0; i < len(r.names); i++ {
+ r.types[i] = DefaultDBType
}
- return lr.types, nil
+ return r.types, nil
}
// PreReadRow is returns only columns that store preread rows.
-func (lr *LTSVRead) PreReadRow() [][]interface{} {
- rowNum := len(lr.preRead)
+func (r *LTSVRead) PreReadRow() [][]interface{} {
+ rowNum := len(r.preRead)
rows := make([][]interface{}, rowNum)
for n := 0; n < rowNum; n++ {
- rows[n] = make([]interface{}, len(lr.names))
- for i := range lr.names {
- rows[n][i] = lr.preRead[n][lr.names[i]]
+ rows[n] = make([]interface{}, len(r.names))
+ for i := range r.names {
+ rows[n][i] = r.preRead[n][r.names[i]]
}
}
return rows
}
// ReadRow is read the rest of the row.
-func (lr *LTSVRead) ReadRow(row []interface{}) ([]interface{}, error) {
- record, _, err := lr.read()
+func (r *LTSVRead) ReadRow(row []interface{}) ([]interface{}, error) {
+ record, _, err := r.read()
if err != nil {
return row, err
}
- for i := range lr.names {
- row[i] = record[lr.names[i]]
+ for i := range r.names {
+ row[i] = record[r.names[i]]
}
return row, nil
}
-func (lr *LTSVRead) read() (map[string]string, []string, error) {
- line, err := lr.readline()
+func (r *LTSVRead) read() (map[string]string, []string, error) {
+ line, err := r.readline()
if err != nil {
return nil, nil, err
}
- columns := strings.Split(line, lr.delimiter)
+ columns := strings.Split(line, r.delimiter)
lvs := make(map[string]string)
keys := make([]string, 0, len(columns))
for _, column := range columns {
@@ -111,9 +111,9 @@ func (lr *LTSVRead) read() (map[string]string, []string, error) {
return lvs, keys, nil
}
-func (lr *LTSVRead) readline() (string, error) {
+func (r *LTSVRead) readline() (string, error) {
for {
- line, _, err := lr.reader.ReadLine()
+ line, _, err := r.reader.ReadLine()
if err != nil {
return "", err
}
diff --git a/output_csv.go b/output_csv.go
index eb4af4f..785b0be 100644
--- a/output_csv.go
+++ b/output_csv.go
@@ -13,39 +13,39 @@ type CSVWrite struct {
}
func NewCSVWrite(writeOpts WriteOpts) *CSVWrite {
var err error
- c := &CSVWrite{}
- c.writer = csv.NewWriter(writeOpts.OutStream)
- c.writer.Comma, err = delimiter(writeOpts.OutDelimiter)
+ w := &CSVWrite{}
+ w.writer = csv.NewWriter(writeOpts.OutStream)
+ w.writer.Comma, err = delimiter(writeOpts.OutDelimiter)
if err != nil {
debug.Printf("%s\n", err)
}
- c.outHeader = writeOpts.OutHeader
- return c
+ w.outHeader = writeOpts.OutHeader
+ return w
}
-// First is output of header and preparation
-func (c *CSVWrite) First(columns []string, types []string) error {
- if c.outHeader {
- err := c.writer.Write(columns)
+// PreWrite is output of header and preparation
+func (w *CSVWrite) PreWrite(columns []string, types []string) error {
+ if w.outHeader {
+ err := w.writer.Write(columns)
if err != nil {
return err
}
}
- c.results = make([]string, len(columns))
+ w.results = make([]string, len(columns))
return nil
}
// WriteRow is row output
-func (c *CSVWrite) WriteRow(values []interface{}, columns []string) error {
+func (w *CSVWrite) WriteRow(values []interface{}, columns []string) error {
for i, col := range values {
- c.results[i] = ValString(col)
+ w.results[i] = ValString(col)
}
- err := c.writer.Write(c.results)
+ err := w.writer.Write(w.results)
return err
}
-// Last is flush
-func (c *CSVWrite) Last() error {
- c.writer.Flush()
+// PostWrite is flush
+func (w *CSVWrite) PostWrite() error {
+ w.writer.Flush()
return nil
}
diff --git a/output_json.go b/output_json.go
index 6bea2d3..7558cbd 100644
--- a/output_json.go
+++ b/output_json.go
@@ -13,25 +13,25 @@ type JSONWrite struct {
}
func NewJSONWrite(writeOpts WriteOpts) *JSONWrite {
- js := &JSONWrite{}
- js.writer = json.NewEncoder(writeOpts.OutStream)
- js.writer.SetIndent("", " ")
- return js
+ w := &JSONWrite{}
+ w.writer = json.NewEncoder(writeOpts.OutStream)
+ w.writer.SetIndent("", " ")
+ return w
}
-// First is preparation
-func (js *JSONWrite) First(columns []string, types []string) error {
- js.results = make([]map[string]interface{}, 0)
+// PreWrite is preparation
+func (w *JSONWrite) PreWrite(columns []string, types []string) error {
+ w.results = make([]map[string]interface{}, 0)
return nil
}
// WriteRow is Addition to array
-func (js *JSONWrite) WriteRow(values []interface{}, columns []string) error {
+func (w *JSONWrite) WriteRow(values []interface{}, columns []string) error {
m := make(map[string]interface{}, len(columns))
for i, col := range values {
m[columns[i]] = valInterface(col)
}
- js.results = append(js.results, m)
+ w.results = append(w.results, m)
return nil
}
@@ -47,7 +47,7 @@ func valInterface(v interface{}) interface{} {
}
}
-// Last is Actual output
-func (js *JSONWrite) Last() error {
- return js.writer.Encode(js.results)
+// PostWrite is Actual output
+func (w *JSONWrite) PostWrite() error {
+ return w.writer.Encode(w.results)
}
diff --git a/output_ltsv.go b/output_ltsv.go
index 2504769..4bd256a 100644
--- a/output_ltsv.go
+++ b/output_ltsv.go
@@ -13,30 +13,30 @@ type LTSVWrite struct {
}
func NewLTSVWrite(writeOpts WriteOpts) *LTSVWrite {
- lw := &LTSVWrite{}
- lw.delimiter = "\t"
- lw.writer = bufio.NewWriter(writeOpts.OutStream)
- return lw
+ w := &LTSVWrite{}
+ w.delimiter = "\t"
+ w.writer = bufio.NewWriter(writeOpts.OutStream)
+ return w
}
-// First is preparation
-func (lw *LTSVWrite) First(columns []string, types []string) error {
- lw.results = make(map[string]string, len(columns))
+// PreWrite is preparation
+func (w *LTSVWrite) PreWrite(columns []string, types []string) error {
+ w.results = make(map[string]string, len(columns))
return nil
}
// WriteRow is Actual output
-func (lw *LTSVWrite) WriteRow(values []interface{}, columns []string) error {
+func (w *LTSVWrite) WriteRow(values []interface{}, columns []string) error {
results := make([]string, len(values))
for i, col := range values {
results[i] = columns[i] + ":" + ValString(col)
}
- str := strings.Join(results, lw.delimiter) + "\n"
- _, err := lw.writer.Write([]byte(str))
+ str := strings.Join(results, w.delimiter) + "\n"
+ _, err := w.writer.Write([]byte(str))
return err
}
-// Last is flush
-func (lw *LTSVWrite) Last() error {
- return lw.writer.Flush()
+// PostWrite is flush
+func (w *LTSVWrite) PostWrite() error {
+ return w.writer.Flush()
}
diff --git a/output_raw.go b/output_raw.go
index f40ddc8..160bb01 100644
--- a/output_raw.go
+++ b/output_raw.go
@@ -17,41 +17,41 @@ type RawWrite struct {
}
func NewRAWWrite(writeOpts WriteOpts) *RawWrite {
var err error
- raw := &RawWrite{}
- raw.writer = bufio.NewWriter(writeOpts.OutStream)
- raw.sep, err = strconv.Unquote(`"` + writeOpts.OutDelimiter + `"`)
+ w := &RawWrite{}
+ w.writer = bufio.NewWriter(writeOpts.OutStream)
+ w.sep, err = strconv.Unquote(`"` + writeOpts.OutDelimiter + `"`)
if err != nil {
debug.Printf("%s\n", err)
}
- raw.outHeader = writeOpts.OutHeader
- return raw
+ w.outHeader = writeOpts.OutHeader
+ return w
}
-// First is output of header and preparation
-func (raw *RawWrite) First(columns []string, types []string) error {
- if raw.outHeader {
- _, err := fmt.Fprint(raw.writer, strings.Join(columns, raw.sep), "\n")
+// PreWrite is output of header and preparation
+func (w *RawWrite) PreWrite(columns []string, types []string) error {
+ if w.outHeader {
+ _, err := fmt.Fprint(w.writer, strings.Join(columns, w.sep), "\n")
if err != nil {
debug.Printf("%s\n", err)
}
}
- raw.results = make([]string, len(columns))
+ w.results = make([]string, len(columns))
return nil
}
// WriteRow is row output
-func (raw *RawWrite) WriteRow(values []interface{}, columns []string) error {
+func (w *RawWrite) WriteRow(values []interface{}, columns []string) error {
for i, col := range values {
- raw.results[i] = ValString(col)
+ w.results[i] = ValString(col)
}
- _, err := fmt.Fprint(raw.writer, strings.Join(raw.results, raw.sep), "\n")
+ _, err := fmt.Fprint(w.writer, strings.Join(w.results, w.sep), "\n")
if err != nil {
debug.Printf("%s\n", err)
}
return nil
}
-// Last is flush
-func (raw *RawWrite) Last() error {
- return raw.writer.Flush()
+// PostWrite is flush
+func (w *RawWrite) PostWrite() error {
+ return w.writer.Flush()
}
diff --git a/output_tablewriter.go b/output_tablewriter.go
index 7590f3e..5301a17 100644
--- a/output_tablewriter.go
+++ b/output_tablewriter.go
@@ -11,35 +11,35 @@ type TWWrite struct {
}
func NewTWWrite(writeOpts WriteOpts, markdown bool) *TWWrite {
- tw := &TWWrite{}
- tw.writer = tablewriter.NewWriter(writeOpts.OutStream)
- tw.writer.SetAutoFormatHeaders(false)
+ w := &TWWrite{}
+ w.writer = tablewriter.NewWriter(writeOpts.OutStream)
+ w.writer.SetAutoFormatHeaders(false)
if markdown {
- tw.writer.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false})
- tw.writer.SetCenterSeparator("|")
+ w.writer.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false})
+ w.writer.SetCenterSeparator("|")
}
- return tw
+ return w
}
-// First is preparation
-func (tw *TWWrite) First(columns []string, types []string) error {
- tw.writer.SetHeader(columns)
- tw.results = make([]string, len(columns))
+// PreWrite is preparation
+func (w *TWWrite) PreWrite(columns []string, types []string) error {
+ w.writer.SetHeader(columns)
+ w.results = make([]string, len(columns))
return nil
}
// WriteRow is Addition to array
-func (tw *TWWrite) WriteRow(values []interface{}, columns []string) error {
+func (w *TWWrite) WriteRow(values []interface{}, columns []string) error {
for i, col := range values {
- tw.results[i] = ValString(col)
+ w.results[i] = ValString(col)
}
- tw.writer.Append(tw.results)
+ w.writer.Append(w.results)
return nil
}
-// Last is Actual output
-func (tw *TWWrite) Last() error {
- tw.writer.Render()
+// PostWrite is Actual output
+func (w *TWWrite) PostWrite() error {
+ w.writer.Render()
return nil
}
diff --git a/output_tbln.go b/output_tbln.go
index ea2f1d0..d81dfc9 100644
--- a/output_tbln.go
+++ b/output_tbln.go
@@ -11,13 +11,13 @@ type TBLNWrite struct {
}
func NewTBLNWrite(writeOpts WriteOpts) *TBLNWrite {
- tw := &TBLNWrite{}
- tw.writer = tbln.NewWriter(writeOpts.OutStream)
- return tw
+ w := &TBLNWrite{}
+ w.writer = tbln.NewWriter(writeOpts.OutStream)
+ return w
}
-// First is preparation
-func (tw *TBLNWrite) First(columns []string, types []string) error {
+// PreWrite is preparation
+func (w *TBLNWrite) PreWrite(columns []string, types []string) error {
d := tbln.NewDefinition()
err := d.SetNames(columns)
if err != nil {
@@ -27,23 +27,23 @@ func (tw *TBLNWrite) First(columns []string, types []string) error {
if err != nil {
return err
}
- err = tw.writer.WriteDefinition(d)
+ err = w.writer.WriteDefinition(d)
if err != nil {
return err
}
- tw.results = make([]string, len(columns))
+ w.results = make([]string, len(columns))
return nil
}
// WriteRow is Addition to array
-func (tw *TBLNWrite) WriteRow(values []interface{}, columns []string) error {
+func (w *TBLNWrite) WriteRow(values []interface{}, columns []string) error {
for i, col := range values {
- tw.results[i] = ValString(col)
+ w.results[i] = ValString(col)
}
- return tw.writer.WriteRow(tw.results)
+ return w.writer.WriteRow(w.results)
}
-// Last is Actual output
-func (tw *TBLNWrite) Last() error {
+// PostWrite is Actual output
+func (w *TBLNWrite) PostWrite() error {
return nil
}
diff --git a/output_vertical.go b/output_vertical.go
index a2e7bf5..6c023f2 100644
--- a/output_vertical.go
+++ b/output_vertical.go
@@ -20,39 +20,39 @@ type VFWrite struct {
}
func NewVFWrite(writeOpts WriteOpts) *VFWrite {
var err error
- vf := &VFWrite{}
- vf.writer = bufio.NewWriter(writeOpts.OutStream)
- vf.termWidth, _, err = terminal.GetSize(0)
+ w := &VFWrite{}
+ w.writer = bufio.NewWriter(writeOpts.OutStream)
+ w.termWidth, _, err = terminal.GetSize(0)
if err != nil {
- vf.termWidth = 40
+ w.termWidth = 40
}
- return vf
+ return w
}
-// First is preparation
-func (vf *VFWrite) First(columns []string, types []string) error {
- vf.header = make([]string, len(columns))
- vf.hSize = 0
+// PreWrite is preparation
+func (w *VFWrite) PreWrite(columns []string, types []string) error {
+ w.header = make([]string, len(columns))
+ w.hSize = 0
for i, col := range columns {
- if vf.hSize < runewidth.StringWidth(col) {
- vf.hSize = runewidth.StringWidth(col)
+ if w.hSize < runewidth.StringWidth(col) {
+ w.hSize = runewidth.StringWidth(col)
}
- vf.header[i] = col
+ w.header[i] = col
}
return nil
}
// WriteRow is Actual output
-func (vf *VFWrite) WriteRow(values []interface{}, columns []string) error {
- vf.count++
- _, err := fmt.Fprintf(vf.writer,
- "---[ %d]%s\n", vf.count, strings.Repeat("-", (vf.termWidth-16)))
+func (w *VFWrite) WriteRow(values []interface{}, columns []string) error {
+ w.count++
+ _, err := fmt.Fprintf(w.writer,
+ "---[ %d]%s\n", w.count, strings.Repeat("-", (w.termWidth-16)))
if err != nil {
debug.Printf("%s\n", err)
}
- for i, col := range vf.header {
- v := vf.hSize - runewidth.StringWidth(col)
- _, err := fmt.Fprintf(vf.writer,
+ for i, col := range w.header {
+ v := w.hSize - runewidth.StringWidth(col)
+ _, err := fmt.Fprintf(w.writer,
"%s%s | %-s\n",
strings.Repeat(" ", v+2),
col,
@@ -64,7 +64,7 @@ func (vf *VFWrite) WriteRow(values []interface{}, columns []string) error {
return nil
}
-// Last is flush
-func (vf *VFWrite) Last() error {
- return vf.writer.Flush()
+// PostWrite is flush
+func (w *VFWrite) PostWrite() error {
+ return w.writer.Flush()
}
diff --git a/writer.go b/writer.go
new file mode 100644
index 0000000..73ca979
--- /dev/null
+++ b/writer.go
@@ -0,0 +1,31 @@
+package trdsql
+
+// Writer is file format writer
+type Writer interface {
+ PreWrite([]string, []string) error
+ WriteRow([]interface{}, []string) error
+ PostWrite() error
+}
+
+func NewWriter(writeOpts WriteOpts) Writer {
+ switch writeOpts.OutFormat {
+ case LTSV:
+ return NewLTSVWrite(writeOpts)
+ case JSON:
+ return NewJSONWrite(writeOpts)
+ case RAW:
+ return NewRAWWrite(writeOpts)
+ case MD:
+ return NewTWWrite(writeOpts, true)
+ case AT:
+ return NewTWWrite(writeOpts, false)
+ case VF:
+ return NewVFWrite(writeOpts)
+ case TBLN:
+ return NewTBLNWrite(writeOpts)
+ case CSV:
+ return NewCSVWrite(writeOpts)
+ default:
+ return NewCSVWrite(writeOpts)
+ }
+}
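Usage note: after this change, a custom output format implements the renamed Writer interface (PreWrite/WriteRow/PostWrite) instead of First/WriteRow/Last. The sketch below mirrors the ArrayTable example at the top of this patch and just collects rows in memory. It assumes the import path github.com/noborus/trdsql and relies on the exported Writer interface and ValString helper shown above; how the writer is wired into a query follows _example/simple/main.go, which this diff only shows in part.

package example

import (
	"fmt"

	"github.com/noborus/trdsql"
)

// ArrayWriter collects query results into an in-memory table.
type ArrayWriter struct {
	columns []string
	table   [][]string
}

// Compile-time check that ArrayWriter satisfies the renamed Writer interface.
var _ trdsql.Writer = (*ArrayWriter)(nil)

// PreWrite receives the column names and types once, before the first row.
func (a *ArrayWriter) PreWrite(columns []string, types []string) error {
	a.columns = columns
	a.table = make([][]string, 0)
	return nil
}

// WriteRow is called once per result row; each value is converted to a string.
func (a *ArrayWriter) WriteRow(values []interface{}, columns []string) error {
	row := make([]string, len(values))
	for i, v := range values {
		row[i] = trdsql.ValString(v)
	}
	a.table = append(a.table, row)
	return nil
}

// PostWrite runs after the last row; here it simply prints the collected table.
func (a *ArrayWriter) PostWrite() error {
	fmt.Println(a.columns)
	fmt.Println(a.table)
	return nil
}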