main.go
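
// s3pgbackups dumps each database in the configured Postgres cluster,
// compresses the dumps with gzip, uploads them to S3, and rotates old backups.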
package main

import (
	"bytes"
	"flag"
	"io/ioutil"
	"log"
	"os"
	"os/exec"
	"path/filepath"
	"time"

	"github.com/gregarmer/s3pgbackups/config"
	"github.com/gregarmer/s3pgbackups/database"
	"github.com/gregarmer/s3pgbackups/dest"
	"github.com/gregarmer/s3pgbackups/utils"
)
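
// workingDir is the directory, relative to the current working directory,
// where database dumps are staged before they are uploaded to S3.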
const workingDir = "temp"

var verbose = flag.Bool("v", false, "be verbose")
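// noop is threaded through to the S3 helpers below (UploadTree and
// RotateBackups), which presumably skip their side effects when it is set.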
var noop = flag.Bool("n", false,
	"don't actually do anything, just print what would be done")

func main() {
	startTime := time.Now()
	flag.Parse()

	if !*verbose {
		log.SetOutput(ioutil.Discard)
	}

	log.Printf("starting postgres cluster backup")
	if *noop {
		log.Printf("running in no-op mode, no commands will really be executed")
	}

	conf := config.LoadConfig()

	// Don't print real passwords and secret keys in verbose mode.
	verboseConfig := conf.Copy()
	verboseConfig.PostgresPassword = "****"
	verboseConfig.AwsSecretKey = "****"
	log.Printf("config: %+v", verboseConfig)

	// S3 destination and Postgres source, both built from the loaded config.
	awsS3 := dest.AwsS3{Config: conf}
	postgres := database.Postgres{Config: conf}

	// Create a working directory to store the backups, removing any
	// leftover directory from a previous run.
	currentDir, err := os.Getwd()
	utils.CheckErr(err)
	fullWorkingDir := filepath.Join(currentDir, workingDir)
	if _, err := os.Stat(fullWorkingDir); !os.IsNotExist(err) {
		log.Printf("working directory already exists at %s, removing it",
			fullWorkingDir)
		os.RemoveAll(fullWorkingDir)
	}
	utils.CheckErr(os.Mkdir(fullWorkingDir, 0700))

	// Back up the databases.
	for _, db := range postgres.GetDatabases() {
		if conf.ShouldExcludeDb(db) {
			log.Printf("[database] skipping '%s' because it's in excludes", db)
			continue
		}

		log.Printf("[%s] backing up database", db)

		// Create the backup dump.
		backupFileName := postgres.DumpDatabase(db, fullWorkingDir)

		// Compress the dump.
		var out bytes.Buffer
		cmd := exec.Command("gzip", filepath.Join(fullWorkingDir, backupFileName))
		cmd.Stdout = &out
		err := cmd.Run()
		utils.CheckErr(err)
	}
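
	// gzip compresses each dump in place, so at this point the working
	// directory should contain only compressed dumps.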
	// Walk the working directory and upload everything to S3.
	awsS3.UploadTree(fullWorkingDir, noop)

	// Clean up the working directory.
	os.RemoveAll(fullWorkingDir)

	// Rotate old S3 backups.
	log.Printf("rotating old backups")
	awsS3.RotateBackups(noop)

	log.Printf("done - took %s", time.Since(startTime))
}