-
Notifications
You must be signed in to change notification settings - Fork 73
Refactor professor course and section pipelines into a pipeline builder function #296
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: develop
Are you sure you want to change the base?
Changes from all commits
2c22c80
acaac1d
15c56f4
c22c940
3388ccb
f59c0be
9555b84
95d7de4
a74fc7f
f860119
8b48386
6fa43e6
f767ba3
53aa1e9
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -198,6 +198,82 @@ func ProfessorCourseById() gin.HandlerFunc { | |
| } | ||
| } | ||
|
|
||
| // Pipeline builder for professor aggregate endpoints | ||
| func professorPipeline(endpoint string, professorQuery bson.M, paginateMap map[string]int64) mongo.Pipeline { | ||
| // common stages | ||
| baseStages := mongo.Pipeline{ | ||
| // filter the professors | ||
| bson.D{{Key: "$match", Value: professorQuery}}, | ||
|
|
||
| // paginate the professors before pulling the courses from those professors | ||
| bson.D{{Key: "$skip", Value: paginateMap["former_offset"]}}, // skip to the specified offset | ||
| bson.D{{Key: "$limit", Value: paginateMap["limit"]}}, // limit to the specified number of professors | ||
|
|
||
| // lookup the array of sections from sections collection | ||
| bson.D{{Key: "$lookup", Value: bson.D{ | ||
| {Key: "from", Value: "sections"}, | ||
| {Key: "localField", Value: "sections"}, | ||
| {Key: "foreignField", Value: "_id"}, | ||
| {Key: "as", Value: "sections"}, | ||
| }}}, | ||
|
|
||
| // keep order deterministic between calls | ||
| bson.D{{Key: "$sort", Value: bson.D{{Key: "_id", Value: 1}}}}, | ||
| } | ||
|
|
||
| // course pagination stages | ||
| paginationStages := mongo.Pipeline{ | ||
| bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}}, | ||
| bson.D{{Key: "$limit", Value: paginateMap["limit"]}}, | ||
mikehquan19 marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| } | ||
|
|
||
| if endpoint == "courses" { | ||
| courseStages := mongo.Pipeline{ | ||
| // project the courses referenced by each section in the array | ||
| bson.D{{Key: "$project", Value: bson.D{{Key: "courses", Value: "$sections.course_reference"}}}}, | ||
|
|
||
| // lookup the array of courses from the courses collection | ||
| bson.D{{Key: "$lookup", Value: bson.D{ | ||
| {Key: "from", Value: "courses"}, | ||
| {Key: "localField", Value: "courses"}, | ||
| {Key: "foreignField", Value: "_id"}, | ||
| {Key: "as", Value: "courses"}, | ||
| }}}, | ||
|
|
||
| // unwind the courses | ||
| bson.D{{Key: "$unwind", Value: bson.D{ | ||
| {Key: "path", Value: "$courses"}, | ||
| {Key: "preserveNullAndEmptyArrays", Value: false}, // to avoid the professor documents that can't be replaced | ||
| }}}, | ||
|
|
||
| // replace the combination of ids and courses with the courses entirely | ||
| bson.D{{Key: "$replaceWith", Value: "$courses"}}, | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. As you can see here, $unwind and $replaceWith are kind of repeated between the 2 branches. Can you come up with a way to add them to the pagination stages too and just replace the value with the endpoint parameter? |
||
| } | ||
|
|
||
| return append(append(baseStages, courseStages...), paginationStages...) | ||
| } | ||
|
|
||
| if endpoint == "sections" { | ||
| sectionStages := mongo.Pipeline{ | ||
| // project the sections | ||
| bson.D{{Key: "$project", Value: bson.D{{Key: "sections", Value: "$sections"}}}}, | ||
|
|
||
| // unwind the sections | ||
| bson.D{{Key: "$unwind", Value: bson.D{ | ||
| {Key: "path", Value: "$sections"}, | ||
| {Key: "preserveNullAndEmptyArrays", Value: false}, // to avoid the professor documents that can't be replaced | ||
| }}}, | ||
|
|
||
| // replace the combination of ids and sections with the sections entirely | ||
| bson.D{{Key: "$replaceWith", Value: "$sections"}}, | ||
| } | ||
|
|
||
| return append(append(baseStages, sectionStages...), paginationStages...) | ||
| } | ||
|
|
||
| return append(baseStages, paginationStages...) // fallback (shouldn't happen because we call with either courses or sections) | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I think here we don't need a fallback; if the endpoint is invalid we will just panic :) |
||
| } | ||
|
|
||
| // Get all of the courses of the professors depending on the type of flag | ||
| func professorCourse(flag string, c *gin.Context) { | ||
| ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) | ||
|
|
@@ -222,49 +298,7 @@ func professorCourse(flag string, c *gin.Context) { | |
| } | ||
|
|
||
| // Pipeline to query the courses from the filtered professors (or a single professor) | ||
| professorCoursePipeline := mongo.Pipeline{ | ||
| // filter the professors | ||
| bson.D{{Key: "$match", Value: professorQuery}}, | ||
|
|
||
| // paginate the professors before pulling the courses from those professors | ||
| bson.D{{Key: "$skip", Value: paginateMap["former_offset"]}}, // skip to the specified offset | ||
| bson.D{{Key: "$limit", Value: paginateMap["limit"]}}, // limit to the specified number of professors | ||
|
|
||
| // lookup the array of sections from sections collection | ||
| bson.D{{Key: "$lookup", Value: bson.D{ | ||
| {Key: "from", Value: "sections"}, | ||
| {Key: "localField", Value: "sections"}, | ||
| {Key: "foreignField", Value: "_id"}, | ||
| {Key: "as", Value: "sections"}, | ||
| }}}, | ||
|
|
||
| // project the courses referenced by each section in the array | ||
| bson.D{{Key: "$project", Value: bson.D{{Key: "courses", Value: "$sections.course_reference"}}}}, | ||
|
|
||
| // lookup the array of courses from the courses collection | ||
| bson.D{{Key: "$lookup", Value: bson.D{ | ||
| {Key: "from", Value: "courses"}, | ||
| {Key: "localField", Value: "courses"}, | ||
| {Key: "foreignField", Value: "_id"}, | ||
| {Key: "as", Value: "courses"}, | ||
| }}}, | ||
|
|
||
| // unwind the courses | ||
| bson.D{{Key: "$unwind", Value: bson.D{ | ||
| {Key: "path", Value: "$courses"}, | ||
| {Key: "preserveNullAndEmptyArrays", Value: false}, // to avoid the professor documents that can't be replaced | ||
| }}}, | ||
|
|
||
| // replace the combination of ids and courses with the courses entirely | ||
| bson.D{{Key: "$replaceWith", Value: "$courses"}}, | ||
|
|
||
| // keep order deterministic between calls | ||
| bson.D{{Key: "$sort", Value: bson.D{{Key: "_id", Value: 1}}}}, | ||
|
|
||
| // paginate the courses | ||
| bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}}, | ||
| bson.D{{Key: "$limit", Value: paginateMap["limit"]}}, | ||
| } | ||
| professorCoursePipeline := professorPipeline("courses", professorQuery, paginateMap) | ||
|
|
||
| // Perform aggregation on the pipeline | ||
| cursor, err := professorCollection.Aggregate(ctx, professorCoursePipeline) | ||
|
|
@@ -354,41 +388,7 @@ func professorSection(flag string, c *gin.Context) { | |
| } | ||
|
|
||
| // Pipeline to query the courses from the filtered professors (or a single professor) | ||
| professorSectionPipeline := mongo.Pipeline{ | ||
| // filter the professors | ||
| bson.D{{Key: "$match", Value: professorQuery}}, | ||
|
|
||
| // paginate the professors before pulling the courses from those professor | ||
| bson.D{{Key: "$skip", Value: paginateMap["former_offset"]}}, // skip to the specified offset | ||
| bson.D{{Key: "$limit", Value: paginateMap["limit"]}}, // limit to the specified number of professors | ||
|
|
||
| // lookup the array of sections from sections collection | ||
| bson.D{{Key: "$lookup", Value: bson.D{ | ||
| {Key: "from", Value: "sections"}, | ||
| {Key: "localField", Value: "sections"}, | ||
| {Key: "foreignField", Value: "_id"}, | ||
| {Key: "as", Value: "sections"}, | ||
| }}}, | ||
|
|
||
| // project the sections | ||
| bson.D{{Key: "$project", Value: bson.D{{Key: "sections", Value: "$sections"}}}}, | ||
|
|
||
| // unwind the sections | ||
| bson.D{{Key: "$unwind", Value: bson.D{ | ||
| {Key: "path", Value: "$sections"}, | ||
| {Key: "preserveNullAndEmptyArrays", Value: false}, // to avoid the professor documents that can't be replaced | ||
| }}}, | ||
|
|
||
| // replace the combination of ids and sections with the sections entirely | ||
| bson.D{{Key: "$replaceWith", Value: "$sections"}}, | ||
|
|
||
| // keep order deterministic between calls | ||
| bson.D{{Key: "$sort", Value: bson.D{{Key: "_id", Value: 1}}}}, | ||
|
|
||
| // paginate the sections | ||
| bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}}, | ||
| bson.D{{Key: "$limit", Value: paginateMap["limit"]}}, | ||
| } | ||
| professorSectionPipeline := professorPipeline("sections", professorQuery, paginateMap) | ||
|
|
||
| // Perform aggregation on the pipeline | ||
| cursor, err := professorCollection.Aggregate(ctx, professorSectionPipeline) | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I think $sort could belong to the pagination stages, since the results are supposed to be sorted right before we paginate the second time.