File tree Expand file tree Collapse file tree
Expand file tree Collapse file tree Original file line number Diff line number Diff line change 1+ package main
2+
3+ import (
4+ "context"
5+ "log"
6+ "os"
7+ "path/filepath"
8+ "bufio"
9+ "strings"
10+
11+ "go.uber.org/zap"
12+ "pixelmap.io/backend/internal/ingestor"
13+ )
14+
15+ func loadEnv () {
16+ envPath := ".env"
17+ if _ , err := os .Stat (envPath ); os .IsNotExist (err ) {
18+ envPath = filepath .Join (".." , ".env" )
19+ }
20+
21+ file , err := os .Open (envPath )
22+ if err != nil {
23+ log .Printf ("Warning: Could not load .env file: %v" , err )
24+ return
25+ }
26+ defer file .Close ()
27+
28+ scanner := bufio .NewScanner (file )
29+ for scanner .Scan () {
30+ line := scanner .Text ()
31+ if line == "" || strings .HasPrefix (line , "#" ) {
32+ continue
33+ }
34+
35+ parts := strings .SplitN (line , "=" , 2 )
36+ if len (parts ) == 2 {
37+ key := strings .TrimSpace (parts [0 ])
38+ value := strings .TrimSpace (parts [1 ])
39+ value = strings .Trim (value , `"'` )
40+ os .Setenv (key , value )
41+ }
42+ }
43+ }
44+
45+ func main () {
46+ // Load environment variables
47+ loadEnv ()
48+
49+ // Create logger
50+ logger , _ := zap .NewProduction ()
51+ defer logger .Sync ()
52+
53+ // Create S3 syncer
54+ s3Syncer , err := ingestor .NewS3Syncer (logger , "cache" )
55+ if err != nil {
56+ log .Fatal ("Failed to create S3 syncer:" , err )
57+ }
58+
59+ log .Println ("Starting S3 sync..." )
60+
61+ // Sync to S3
62+ ctx := context .Background ()
63+ if err := s3Syncer .SyncWithS3 (ctx ); err != nil {
64+ log .Fatal ("Failed to sync to S3:" , err )
65+ }
66+
67+ log .Println ("S3 sync complete!" )
68+ }
#!/bin/bash
# Sync the local cache directory to the pixelmap.art S3 bucket.

# Load AWS credentials from .env if present. Bug fix: the comment filter
# was grep -v ' ^#' (leading space inside the pattern), which matched
# nothing, so commented lines leaked into the environment; also avoids
# the useless `cat | grep`. NOTE(review): `export $(... | xargs)` still
# breaks on values containing spaces — acceptable for simple KEY=VALUE
# .env files.
if [ -f .env ]; then
    export $(grep -v '^#' .env | xargs)
fi

# S3 bucket name (from the Go code we saw). Bug fix: the bucket name and
# the s3:// URI below previously contained stray spaces ("pixelmap.art"
# vs " pixelmap.art", "s3://$BUCKET /") that would have produced an
# invalid bucket/URI.
BUCKET="pixelmap.art"

echo "Syncing cache directory to S3 bucket: $BUCKET"

# Sync the entire cache directory to S3.
# Add --delete to remove files from S3 that don't exist locally.
# Bug fix: the exclude patterns had leading spaces (" .DS_Store",
# " *.log") and would never have matched.
aws s3 sync cache/ "s3://$BUCKET/" \
    --exclude ".DS_Store" \
    --exclude "*.log" \
    --acl public-read

echo "Sync complete!"

# Optionally, you can sync just specific directories:
# aws s3 sync cache/tile/ "s3://$BUCKET/tile/" --acl public-read
# aws s3 cp cache/tiledata.json "s3://$BUCKET/tiledata.json" --acl public-read
You can’t perform that action at this time.
0 commit comments