forked from UTDNebula/api-tools
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.go
More file actions
143 lines (125 loc) · 5.17 KB
/
main.go
File metadata and controls
143 lines (125 loc) · 5.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
package main
import (
	"flag"
	"fmt"
	"log"
	"os"
	"path/filepath"
	"time"

	"github.com/UTDNebula/api-tools/parser"
	"github.com/UTDNebula/api-tools/scrapers"
	"github.com/UTDNebula/api-tools/uploader"
	"github.com/UTDNebula/api-tools/utils"
	"github.com/joho/godotenv"
)
// main is the CLI entry point for api-tools. It parses mode flags
// (-scrape, -parse, -upload), sets up session logging, then dispatches
// to the matching scraper/parser/uploader package.
func main() {
	// Load environment variables from a .env file, if present. The error is
	// deliberately ignored: variables may already be set in the environment.
	godotenv.Load()

	// Setup flags
	// I/O Flags
	inDir := flag.String("i", "./data", "The directory to read data from. Defaults to ./data.")
	outDir := flag.String("o", "./data", "The directory to write resulting data to. Defaults to ./data.")
	logDir := flag.String("l", "./logs", "The directory to write logs to. Defaults to ./logs.")
	// Flags for all scraping
	scrape := flag.Bool("scrape", false, "Puts the tool into scraping mode.")
	// Flags for coursebook scraping
	scrapeCoursebook := flag.Bool("coursebook", false, "Alongside -scrape, signifies that coursebook should be scraped.")
	term := flag.String("term", "", "Alongside -coursebook, specifies the term to scrape, i.e. 23S")
	startPrefix := flag.String("startprefix", "", "Alongside -coursebook, specifies the course prefix to start scraping from, i.e. cp_span")
	// Flag for profile scraping
	scrapeProfiles := flag.Bool("profiles", false, "Alongside -scrape, signifies that professor profiles should be scraped.")
	// Flag for soc scraping
	scrapeOrganizations := flag.Bool("organizations", false, "Alongside -scrape, signifies that SOC organizations should be scraped.")
	// Flag for calendar scraping
	scrapeCalendar := flag.Bool("calendar", false, "Alongside -scrape, signifies that calendar should be scraped.")
	// Flag for astra scraping and parsing
	astra := flag.Bool("astra", false, "Alongside -scrape or -parse, signifies that Astra should be scraped/parsed.")
	// Flag for mazevo scraping and parsing
	mazevo := flag.Bool("mazevo", false, "Alongside -scrape or -parse, signifies that Mazevo should be scraped/parsed.")
	// Flag for map scraping, parsing, and uploading
	mapFlag := flag.Bool("map", false, "Alongside -scrape, -parse, or -upload, signifies that the UTD map should be scraped/parsed/uploaded.")
	// Flags for parsing
	parse := flag.Bool("parse", false, "Puts the tool into parsing mode.")
	csvDir := flag.String("csv", "./grade-data", "Alongside -parse, specifies the path to the directory of CSV files containing grade data.")
	skipValidation := flag.Bool("skipv", false, "Alongside -parse, signifies that the post-parsing validation should be skipped. Be careful with this!")
	// Flags for uploading data
	upload := flag.Bool("upload", false, "Puts the tool into upload mode.")
	replace := flag.Bool("replace", false, "Alongside -upload, specifies that uploaded data should replace existing data rather than being merged.")
	staticOnly := flag.Bool("static", false, "Alongside -upload, specifies that we should only build and upload the static aggregations.")
	events := flag.Bool("events", false, "Alongside -upload, signifies that Astra and Mazevo should be uploaded.")
	// Flags for logging
	verbose := flag.Bool("verbose", false, "Enables verbose logging, good for debugging purposes.")
	// Flag for headless mode
	headless := flag.Bool("headless", false, "Enables headless mode for chromedp.")

	// Parse flags
	flag.Parse()

	// Make the log dir (and any missing parents) if it doesn't already exist.
	// FIX: the error was previously ignored, which deferred the failure to a
	// confusing os.Create error below.
	if err := os.MkdirAll(*logDir, os.ModePerm); err != nil {
		log.Fatal(err)
	}

	// Make a new log file for this session using a timestamp.
	// FIX: the path was previously hard-coded to ./logs, silently ignoring
	// the -l flag; it now respects *logDir.
	dateTime := time.Now()
	year, month, day := dateTime.Date()
	hour, min, sec := dateTime.Clock()
	logName := fmt.Sprintf("%d-%d-%dT%d-%d-%d.log", month, day, year, hour, min, sec)
	logFile, err := os.Create(filepath.Join(*logDir, logName))
	if err != nil {
		log.Fatal(err)
	}
	defer logFile.Close()

	// Set logging output destination to a SplitWriter that writes to both the log file and stdout
	log.SetOutput(utils.NewSplitWriter(logFile, os.Stdout))

	// Do verbose logging if verbose flag specified
	if *verbose {
		log.SetFlags(log.Ltime | log.Lmicroseconds | log.Lshortfile | utils.Lverbose)
	} else {
		log.SetFlags(log.Ltime)
	}

	// Perform actions based on flags
	utils.Headless = *headless
	switch {
	case *scrape:
		// Scraping mode: exactly one scraper sub-flag must be set.
		switch {
		case *scrapeProfiles:
			scrapers.ScrapeProfiles(*outDir)
		case *scrapeCoursebook:
			if *term == "" {
				log.Panic("No term specified for coursebook scraping! Use -term to specify.")
			}
			scrapers.ScrapeCoursebook(*term, *startPrefix, *outDir)
		case *scrapeOrganizations:
			scrapers.ScrapeOrganizations(*outDir)
		case *scrapeCalendar:
			scrapers.ScrapeCalendar(*outDir)
		case *astra:
			scrapers.ScrapeAstra(*outDir)
		case *mazevo:
			scrapers.ScrapeMazevo(*outDir)
		case *mapFlag:
			scrapers.ScrapeMapLocations(*outDir)
		default:
			log.Panic("You must specify which type of scraping you would like to perform with one of the scraping flags!")
		}
	case *parse:
		// Parsing mode: sub-flags select a specialized parser; the default
		// runs the general course/grade parser.
		switch {
		case *astra:
			parser.ParseAstra(*inDir, *outDir)
		case *mazevo:
			parser.ParseMazevo(*inDir, *outDir)
		case *mapFlag:
			parser.ParseMapLocations(*inDir, *outDir)
		default:
			parser.Parse(*inDir, *outDir, *csvDir, *skipValidation)
		}
	case *upload:
		// Upload mode: sub-flags select event/map uploads; the default
		// uploads the main dataset.
		switch {
		case *events:
			uploader.UploadEvents(*inDir)
		case *mapFlag:
			uploader.UploadMapLocations(*inDir)
		default:
			uploader.Upload(*inDir, *replace, *staticOnly)
		}
	default:
		// No mode selected: print usage and exit.
		flag.PrintDefaults()
		return
	}
}