A simple Go program that compares the structure of a source PostgreSQL table with the target table on BigQuery. If the BigQuery table is missing some columns, its schema is updated. I removed some parts, so the program as listed below does not work out of the box, but it can be used as inspiration.
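The listing references a small internal helper package pcg (debug printing helpers and the GCP project ID) that is among the removed parts. A minimal stand-in along these lines would let the listing compile; the names mirror the calls in the code below, but the implementation is an assumption, not the original package:

// Package pcg is a hypothetical stand-in for the removed helper package.
// Only the identifiers referenced by the main listing are sketched here.
package pcg

import "fmt"

var (
	PrintDebugMsg bool   // set from the -debug flag
	DebugLevel    int    // set from the -debug_level flag
	ProjectID     string // GCP project ID, e.g. loaded from config or an env variable
)

// DebugMsg prints a message when debugging is enabled and the message level
// does not exceed the configured debug level.
func DebugMsg(level int, args ...interface{}) {
	if PrintDebugMsg && level <= DebugLevel {
		fmt.Println(args...)
	}
}

// PrintMsg always prints a message.
func PrintMsg(args ...interface{}) {
	fmt.Println(args...)
}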
package main

import (
	"context"
	"flag"
	"fmt"
	"log"
	"path/filepath"

	"cloud.google.com/go/bigquery"
	"google.golang.org/api/iterator"
	// The internal helper package pcg (debug printing, GCP project ID) is one of
	// the removed parts; import it from wherever it lives in your project.
)

func main() {
	// Command-line flags.
	paramDebug := flag.Bool("debug", false, "print debug messages")
	paramDebugLevel := flag.Int("debug_level", 0, "level of debug messages: 0 = default, common messages from run / 1 = show variables, queries etc. / 2 = deep debug, shows inserted data etc.")
	paramHelp := flag.Bool("help", false, "print help")
	paramBqDataset := flag.String("bqdataset", "", "name of dataset on bigquery / mask of the name")
	paramBqTable := flag.String("bqtable", "", "name of table on bigquery / mask of the name")
	flag.Parse()

	pcg.PrintDebugMsg = *paramDebug
	pcg.DebugLevel = *paramDebugLevel
	printHelpMsg := *paramHelp
	bqDatasetID := *paramBqDataset
	bqTable := *paramBqTable

	pcg.DebugMsg(1, fmt.Sprintf("pcg.PrintDebugMsg: %v", pcg.PrintDebugMsg))
	pcg.DebugMsg(1, fmt.Sprintf("printHelpMsg: %v", printHelpMsg))
	pcg.DebugMsg(1, fmt.Sprintf("bqDatasetID: %s", bqDatasetID))
	pcg.DebugMsg(1, fmt.Sprintf("bqTable: %s", bqTable))

	if printHelpMsg {
		flag.PrintDefaults()
		log.Fatal()
	}

	ctx := context.Background()

	// BigQuery client for the configured project.
	bqDB, err := bigquery.NewClient(ctx, pcg.ProjectID)
	if err != nil {
		log.Fatalln("Cannot create new BQ client: ", err)
	}

	var tableMatch bool

	// Walk all datasets in the project and pick the one(s) matching -bqdataset.
	bqdatasets := bqDB.Datasets(ctx)
	for {
		dataset, err := bqdatasets.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			log.Fatalf("Cannot iterate over datasets: %s", err.Error())
		}

		datasetName := dataset.DatasetID
		pcg.DebugMsg(1, "dataset: ", datasetName)

		if bqDatasetID == "" || bqDatasetID == datasetName {
			datasetMeta, err := dataset.Metadata(ctx)
			if err == nil {
				pcg.DebugMsg(2, "dataset meta: ", datasetMeta.Name, " - ", datasetMeta.Location)
			} else {
				pcg.PrintMsg("error in checking dataset metadata: ", err)
			}

			// Walk all tables in the dataset; -bqtable may be a glob-style mask.
			bqtables := dataset.Tables(ctx)
			for {
				table, err := bqtables.Next()
				if err == iterator.Done {
					break
				}
				if err != nil {
					log.Fatalf("Cannot iterate over tables: %s", err.Error())
				}

				tableName := table.TableID
				pcg.DebugMsg(1, "table: ", tableName)

				if bqTable != "" {
					tableMatch, err = filepath.Match(bqTable, tableName)
					if err != nil {
						log.Fatal("cannot check table name match: ", err)
					}
					pcg.DebugMsg(2, "tableMatch: ", tableMatch)
				}

				if bqTable == "" || tableMatch {
					// Inspect the table schema and report required (NOT NULL) columns.
					tableMeta, err := table.Metadata(ctx)
					if err == nil {
						sch := tableMeta.Schema
						pcg.DebugMsg(2, "columns: ", len(sch))
						for i, col := range sch {
							pcg.DebugMsg(2, "column ", i, ": ", col.Name, " - ", col.Required, " - ", col.Type)
							if col.Required {
								pcg.PrintMsg(dataset.DatasetID, ".", table.TableID, " - ", col.Name)
							}
						}
					} else {
						pcg.PrintMsg("error in checking table metadata: ", err)
					}
				}
			}
		}
	}
}
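The schema-update step mentioned in the intro (adding columns that exist in the PostgreSQL source but not in BigQuery) is also among the removed parts. A minimal sketch of how it could look with the same BigQuery client follows; the function name and the missingCols argument are assumptions, and building missingCols from the PostgreSQL catalog (e.g. information_schema.columns) is not shown:

// updateSchema appends the missing columns to the BigQuery table schema.
// missingCols is assumed to be built elsewhere by comparing the PostgreSQL
// column list with tableMeta.Schema.
func updateSchema(ctx context.Context, table *bigquery.Table, tableMeta *bigquery.TableMetadata, missingCols []*bigquery.FieldSchema) error {
	if len(missingCols) == 0 {
		return nil // nothing to add
	}
	newSchema := append(bigquery.Schema{}, tableMeta.Schema...) // copy the existing schema
	newSchema = append(newSchema, missingCols...)               // added columns must be nullable (Required == false)
	// Passing tableMeta.ETag guards against concurrent modifications of the table.
	_, err := table.Update(ctx, bigquery.TableMetadataToUpdate{Schema: newSchema}, tableMeta.ETag)
	return err
}

In the listing above, a call like this could sit in the branch where tableMeta is available, right after the column loop.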