Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions build.bat
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@ echo Performing checks...
go mod tidy && ^
go vet ./... && ^
staticcheck ./... && ^
gofmt -w ./.. && ^
goimports -w ./..
gofmt -w ./ && ^
goimports -w ./
if ERRORLEVEL 1 exit /b %ERRORLEVEL% :: fail if error occurred
echo Checks done!
if %skip%==1 exit
Expand Down
16 changes: 6 additions & 10 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@ package main

import (
"flag"
"fmt"
"log"
"os"
"path/filepath"
"time"

"github.com/UTDNebula/api-tools/parser"
Expand Down Expand Up @@ -68,19 +68,15 @@ func main() {

// Make log dir if it doesn't already exist
if _, err := os.Stat(*logDir); err != nil {
os.Mkdir(*logDir, os.ModePerm)
os.MkdirAll(*logDir, os.ModePerm)
}

// Make new log file for this session using timestamp
dateTime := time.Now()
year, month, day := dateTime.Date()
hour, min, sec := dateTime.Clock()
logFile, err := os.Create(fmt.Sprintf("./logs/%d-%d-%dT%d-%d-%d.log", month, day, year, hour, min, sec))

logFileName := time.Now().Format("01-02-2006T15-04-05.log")
logFile, err := os.Create(filepath.Join(*logDir, logFileName))
if err != nil {
log.Fatal(err)
}

defer logFile.Close()
// Set logging output destination to a SplitWriter that writes to both the log file and stdout
log.SetOutput(utils.NewSplitWriter(logFile, os.Stdout))
Expand All @@ -100,7 +96,7 @@ func main() {
scrapers.ScrapeProfiles(*outDir)
case *scrapeCoursebook:
if *term == "" {
log.Panic("No term specified for coursebook scraping! Use -term to specify.")
log.Fatal("No term specified for coursebook scraping! Use -term to specify.")
}
scrapers.ScrapeCoursebook(*term, *startPrefix, *outDir)
case *scrapeOrganizations:
Expand All @@ -114,7 +110,7 @@ func main() {
case *mapFlag:
scrapers.ScrapeMapLocations(*outDir)
default:
log.Panic("You must specify which type of scraping you would like to perform with one of the scraping flags!")
log.Fatal("You must specify which type of scraping you would like to perform with one of the scraping flags!")
}
case *parse:
switch {
Expand Down
40 changes: 16 additions & 24 deletions parser/courseParser_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,14 @@ func TestGetCourse(t *testing.T) {

for name, testCase := range testData {
t.Run(name, func(t *testing.T) {
t.Parallel()

_, courseNum := getInternalClassAndCourseNum(testCase.ClassInfo)
output := *getCourse(courseNum, testCase.Section.Academic_session, testCase.RowInfo, testCase.ClassInfo)
expected := testCase.Course

// skip fields that use primitive.ObjectID or are populated by ReqParser; they are already
// covered in parser_test and are awkward to implement here, and are mostly out of scope for the course parser
diff := cmp.Diff(expected, output, cmpopts.IgnoreFields(schema.Course{}, "Id", "Sections", "Enrollment_reqs", "Prerequisites"))

if diff != "" {
Expand Down Expand Up @@ -66,20 +70,12 @@ func TestGetCatalogYear(t *testing.T) {
t.Run(name, func(t *testing.T) {
t.Parallel()

defer func() {
// Test fails if we panic when we didn't want to or didn't when we did
if rec := recover(); rec != nil {
if !testCase.Panic {
t.Errorf("unexpected panic for session %q: %v", testCase.Session.Name, rec)
}
} else {
if testCase.Panic {
t.Errorf("expected panic for session %q but got none", testCase.Session.Name)
}
}
}()
if testCase.Panic {
defer FailTestIfNoPanic(t, name)
} else {
defer FailTestIfPanic(t, name)
}

// only call if we *expect* it to succeed
output := getCatalogYear(testCase.Session)
if !testCase.Panic && output != testCase.Expected {
t.Errorf("expected %q, got %q", testCase.Expected, output)
Expand Down Expand Up @@ -133,17 +129,13 @@ func TestGetPrefixAndCourseNum(t *testing.T) {

for name, testCase := range testCases {
t.Run(name, func(t *testing.T) {
defer func() {
if r := recover(); r != nil {
if !testCase.Panic {
t.Errorf("unexpected panic for input %q: %v", name, r)
}
} else {
if testCase.Panic {
t.Errorf("expected panic for input %q but none occurred", name)
}
}
}()
t.Parallel()

if testCase.Panic {
defer FailTestIfNoPanic(t, name)
} else {
defer FailTestIfPanic(t, name)
}

prefix, number := getPrefixAndNumber(testCase.ClassInfo)

Expand Down
123 changes: 48 additions & 75 deletions parser/gradeLoader.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,69 +10,50 @@ import (
"strings"
)

func loadGrades(csvDir string) map[string]map[string][]int {
const NumGradeColumns = 13

// MAP[SEMESTER] -> MAP[SUBJECT + NUMBER + SECTION] -> GRADE DISTRIBUTION
gradeMap := make(map[string]map[string][]int)

if csvDir == "" {
log.Print("No grade data CSV directory specified. Grade data will not be included.")
return gradeMap
}
func loadGrades(csvDir string) error {
log.Print("Beginning grades loading.")

dirPtr, err := os.Open(csvDir)
if err != nil {
panic(err)
}
defer dirPtr.Close()
// MAP[SEMESTER] -> MAP[SUBJECT + NUMBER + SECTION] -> GRADE DISTRIBUTION
GradeMap = make(map[string]map[string][]int)

csvFiles, err := dirPtr.ReadDir(-1)
dir, err := os.ReadDir(csvDir)
if err != nil {
panic(err)
return fmt.Errorf("failed to read grade directory %q: %v", csvDir, err)
}

for _, csvEntry := range csvFiles {

if csvEntry.IsDir() {
for _, entry := range dir {
if entry.IsDir() {
continue
}

csvPath := fmt.Sprintf("%s/%s", csvDir, csvEntry.Name())

csvFile, err := os.Open(csvPath)
result, err := csvToMap(filepath.Join(csvDir, entry.Name()))
if err != nil {
panic(err)
}
defer csvFile.Close()

// Create logs directory
if _, err := os.Stat("./logs/grades"); err != nil {
os.Mkdir("./logs/grades", os.ModePerm)
return fmt.Errorf("failed to process grade file %q: %v", entry.Name(), err)
}
session := strings.TrimSuffix(entry.Name(), ".csv")
GradeMap[session] = result
}

// Create log file [name of csv].log in logs directory
basePath := filepath.Base(csvPath)
csvName := strings.TrimSuffix(basePath, filepath.Ext(basePath))
logFile, err := os.Create("./logs/grades/" + csvName + ".log")
log.Print("Finished loading grades!")
return nil
}

if err != nil {
log.Panic("Could not create CSV log file.")
}
defer logFile.Close()
func csvToMap(csvFilePath string) (map[string][]int, error) {
distroMap := make(map[string][]int)

// Put data from csv into map
gradeMap[csvName] = csvToMap(csvFile, logFile)
csvFile, err := os.Open(csvFilePath)
if err != nil {
return map[string][]int{}, fmt.Errorf("failed to open file: %w", err)
}
defer csvFile.Close()

return gradeMap
}

func csvToMap(csvFile *os.File, logFile *os.File) map[string][]int {
reader := csv.NewReader(csvFile)
records, err := reader.ReadAll() // records is [][]strings
records, err := csv.NewReader(csvFile).ReadAll() // records is [][]string
if err != nil {
log.Panicf("Error parsing %s: %s", csvFile.Name(), err.Error())
return map[string][]int{}, fmt.Errorf("failed to parse CSV data: %w", err)
}

// look for the subject column and w column
subjectCol := -1
catalogNumberCol := -1
Expand All @@ -82,18 +63,18 @@ func csvToMap(csvFile *os.File, logFile *os.File) map[string][]int {

headerRow := records[0]

for j := 0; j < len(headerRow); j++ {
switch {
case headerRow[j] == "Subject":
subjectCol = j
case headerRow[j] == "Catalog Number" || headerRow[j] == "Catalog Nbr":
catalogNumberCol = j
case headerRow[j] == "Section":
sectionCol = j
case headerRow[j] == "W" || headerRow[j] == "Total W" || headerRow[j] == "W Total":
wCol = j
case headerRow[j] == "A+":
aPlusCol = j
for i, header := range headerRow {
switch header {
case "Subject":
subjectCol = i
case "Catalog Number", "Catalog Nbr":
catalogNumberCol = i
case "Section":
sectionCol = i
case "W", "Total W", "W Total":
wCol = i
case "A+":
aPlusCol = i
}
if wCol == -1 || subjectCol == -1 || catalogNumberCol == -1 || sectionCol == -1 || aPlusCol == -1 {
continue
Expand All @@ -103,38 +84,30 @@ func csvToMap(csvFile *os.File, logFile *os.File) map[string][]int {
}

if wCol == -1 {
logFile.WriteString("could not find W column")
//log.Panicf("could not find W column")
}
if sectionCol == -1 {
logFile.WriteString("could not find Section column")
log.Panicf("could not find Section column")
return map[string][]int{}, fmt.Errorf("could not find Section column")
}
if subjectCol == -1 {
logFile.WriteString("could not find Subject column")
log.Panicf("could not find Subject column")
return map[string][]int{}, fmt.Errorf("could not find Subject column")
}
if catalogNumberCol == -1 {
logFile.WriteString("could not find catalog # column")
log.Panicf("could not find catalog # column")
return map[string][]int{}, fmt.Errorf("could not find catalog # column")
}
if aPlusCol == -1 {
logFile.WriteString("could not find A+ column")
log.Panicf("could not find A+ column")
return map[string][]int{}, fmt.Errorf("could not find A+ column")
}

distroMap := make(map[string][]int)

for _, record := range records {
// convert grade distribution from string to int
intSlice := [14]int{}
for _, record := range records[1:] {
intSlice := [NumGradeColumns + 1]int{}

for j := 0; j < 13; j++ {
for j := 0; j < NumGradeColumns; j++ {
intSlice[j], _ = strconv.Atoi(record[aPlusCol+j])
}
// add w number to the grade_distribution slice
// add w number to the end of grade_distribution slice
if wCol != -1 {
intSlice[13], _ = strconv.Atoi(record[wCol])
intSlice[NumGradeColumns], _ = strconv.Atoi(record[wCol])
}

// add new grade distribution to map, keyed by SUBJECT + NUMBER + SECTION
Expand All @@ -143,5 +116,5 @@ func csvToMap(csvFile *os.File, logFile *os.File) map[string][]int {
distroKey := record[subjectCol] + record[catalogNumberCol] + trimmedSectionNumber
distroMap[distroKey] = intSlice[:]
}
return distroMap
return distroMap, nil
}
Loading
Loading