
Commit 91f6516

I do refactoring all day (#342)
* I do refactoring all day
* test example
* tests
* some tests
* more tests
* rewrited test outpit?
* fix tests for pr 5 and 20
* fix tests for pr 1 and 83
* fix pr39
* fix prs 74,99
* fix pr 101
* fix pr 73
* fixed test
* fix pr143
* fix pr100
* found pr that fails
* another one
* add test pr138
* removed 138
* fix pr 193
* fix pr 270

---------

Co-authored-by: dejan-crocoder <dejan@crocoder.dev>
1 parent f387161 commit 91f6516

2 files changed: +1514 −70 lines


packages/functions/transform/src/merge-request-metrics.ts

Lines changed: 129 additions & 70 deletions
@@ -342,7 +342,6 @@ function calculateMrSize(mergeRequestId: number, diffs: { stringifiedHunks: stri
       .reduce((a, b) => a + b, 0);
   }

-
   return mrSize;
 }

@@ -361,15 +360,16 @@ type MergeRequestData = {
   authorExternalId: extract.MergeRequest['authorExternalId']
 }

-type TimelineEventData = {
+export type TimelineEventData = {
   type: extract.TimelineEvents['type'];
   timestamp: extract.TimelineEvents['timestamp'];
   actorId: extract.TimelineEvents['actorId'];
   data: extract.TimelineEvents['data'];
 }

-type MergeRequestNoteData = {
-  createdAt: extract.MergeRequestNote['createdAt'];
+export type MergeRequestNoteData = {
+  type: 'note';
+  timestamp: extract.MergeRequestNote['createdAt'];
   authorExternalId: extract.MergeRequestNote['authorExternalId'];
 }
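With `type: 'note'` and a shared `timestamp` field, a note now has the same discriminant-plus-timestamp shape as a timeline event, which is what lets the two be keyed and ordered together later in `setupTimeline`. A minimal standalone sketch of that idea, using simplified local types (the real `extract.*` field types are not reproduced here):

```ts
// Simplified stand-ins for TimelineEventData / MergeRequestNoteData (hypothetical field types).
type EventLike = { type: 'committed' | 'reviewed' | 'merged'; timestamp: Date; actorId: number | null };
type NoteLike = { type: 'note'; timestamp: Date; authorExternalId: number };

// Because both shapes carry a literal `type` and a `timestamp`, one ordering works for both.
function sortByTimestamp(items: (EventLike | NoteLike)[]) {
  return [...items].sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
}

const ordered = sortByTimestamp([
  { type: 'note', timestamp: new Date('2023-10-02T12:00:00Z'), authorExternalId: 7 },
  { type: 'committed', timestamp: new Date('2023-10-01T09:00:00Z'), actorId: 3 },
]);
console.log(ordered.map(item => item.type)); // ['committed', 'note']
```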

@@ -411,12 +411,12 @@ async function selectExtractData(db: ExtractDatabase, extractMergeRequestId: num
     .all();

   const mergeRequestNotesData = await db.select({
-    createdAt: mergeRequestNotes.createdAt,
+    timestamp: mergeRequestNotes.createdAt,
     authorExternalId: mergeRequestNotes.authorExternalId,
   })
     .from(mergeRequestNotes)
     .where(eq(mergeRequestNotes.mergeRequestId, extractMergeRequestId))
-    .all() satisfies MergeRequestNoteData[];
+    .all() satisfies Omit<MergeRequestNoteData, 'type'>[];

   const timelineEventsData = await db.select({
     type: timelineEvents.type,
@@ -431,7 +431,7 @@ async function selectExtractData(db: ExtractDatabase, extractMergeRequestId: num
   return {
     diffs: mergerRequestDiffsData,
     ...mergeRequestData || { mergeRequest: null },
-    notes: mergeRequestNotesData,
+    notes: mergeRequestNotesData.map(note => ({ ...note, type: 'note' as const })),
     timelineEvents: timelineEventsData,
     ...repositoryData || { repository: null },
   };
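In `selectExtractData`, the query result intentionally lacks the `type` discriminant, so the `satisfies Omit<MergeRequestNoteData, 'type'>[]` check keeps the selected columns honest, and the `.map(note => ({ ...note, type: 'note' as const }))` step then adds the discriminant so each row becomes a full `MergeRequestNoteData`. A hedged sketch of the same pattern in isolation, with a hypothetical `Note` type and no database involved:

```ts
// Hypothetical Note type standing in for MergeRequestNoteData.
type Note = { type: 'note'; timestamp: Date; authorExternalId: number };

// A query-like result that has every field except the literal discriminant.
// `satisfies` checks the shape without changing the inferred type of `rows`.
const rows = [
  { timestamp: new Date('2023-10-01T09:00:00Z'), authorExternalId: 42 },
] satisfies Omit<Note, 'type'>[];

// `as const` keeps `type` as the literal 'note' instead of widening it to string,
// so the mapped array is assignable to Note[].
const notes: Note[] = rows.map(row => ({ ...row, type: 'note' as const }));
console.log(notes[0]?.type); // 'note'
```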
@@ -442,114 +442,172 @@ export type RunContext = {
   transformDatabase: TransformDatabase;
 };

-type TimelineMapKey = {
+export type TimelineMapKey = {
   type: extract.TimelineEvents['type'] | 'note',
   timestamp: Date,
-  actorId: extract.TimelineEvents['actorId'] | extract.MergeRequestNote['authorExternalId'] | null,
 }
+
 function setupTimeline(timelineEvents: TimelineEventData[], notes: MergeRequestNoteData[]) {
   const timeline = new Map<TimelineMapKey,
     TimelineEventData | MergeRequestNoteData
   >();

-
   for (const timelineEvent of timelineEvents) {
     timeline.set({
       type: timelineEvent.type,
       timestamp: timelineEvent.timestamp,
-      actorId: timelineEvent.actorId,
     }, timelineEvent);
   }

   for (const note of notes) {
     timeline.set({
       type: 'note',
-      timestamp: note.createdAt,
-      actorId: note.authorExternalId,
+      timestamp: note.timestamp,
     }, note);
   }

   return timeline;

 }

-function runTimeline(extractMergeRequest: MergeRequestData, timelineEvents: TimelineEventData[], notes: MergeRequestNoteData[]) {
+type calcTimelineArgs = {
+  authorExternalId: extract.MergeRequest['authorExternalId'],
+}

-  const timelineMap = setupTimeline(timelineEvents, notes);
-  const timelineMapKeys = [...timelineMap.keys()];
+export function calculateTimeline(timelineMapKeys: TimelineMapKey[], timelineMap: Map<TimelineMapKey, MergeRequestNoteData | TimelineEventData>, { authorExternalId }: calcTimelineArgs) {
+
+  const commitedEvents = timelineMapKeys.filter(key => key.type === 'committed');
+  commitedEvents.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
+
+  const firstCommitEvent = commitedEvents[0] || null;
+  const lastCommitEvent = commitedEvents[commitedEvents.length - 1] || null;

-  //start coding at
+  const startedCodingAt = firstCommitEvent ? firstCommitEvent.timestamp : null;

-  const committedEvents = timelineMapKeys.filter(({ type }) => type === 'committed') as (TimelineMapKey & { type: 'committed' })[];
+  const mergedEvents = timelineMapKeys.filter(key => key.type === 'merged');
+  mergedEvents.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());

-  let startedCodingAt: Date | null = null;
+  const mergedAt = mergedEvents[0]?.timestamp || null;

-  if (committedEvents.length > 0) {
+  const readyForReviewEvents = timelineMapKeys.filter(key => key.type === 'ready_for_review' || key.type === 'review_requested');
+  readyForReviewEvents.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
+  const lastReadyForReviewEvent = readyForReviewEvents[readyForReviewEvents.length - 1] || null;

-    for (const committedEvent of committedEvents) {
-      if (!startedCodingAt) {
-        startedCodingAt = committedEvent.timestamp;
+  const startedPickupAt = (() => {
+    if (lastCommitEvent === null && lastReadyForReviewEvent === null) {
+      return null;
+    }
+    if (lastReadyForReviewEvent === null && lastCommitEvent) {
+      // problematic code: everything below is problematic
+      const reviewedEventsBeforeLastCommitEvent = timelineMapKeys.filter(key => key.type === 'reviewed' && key.timestamp < lastCommitEvent.timestamp);
+      reviewedEventsBeforeLastCommitEvent.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
+      const firstReviewedEventBeforeLastCommitEvent = reviewedEventsBeforeLastCommitEvent[0];
+      if (firstReviewedEventBeforeLastCommitEvent) {
+        return [...commitedEvents].reverse().find(event => event.timestamp < firstReviewedEventBeforeLastCommitEvent.timestamp)?.timestamp || null;
       }
-      else if (committedEvent.timestamp.getTime() < startedCodingAt.getTime()) {
-        startedCodingAt = committedEvent.timestamp;
+
+      return lastCommitEvent.timestamp;
+    }
+    if (lastReadyForReviewEvent && lastCommitEvent) {
+      // problematic code: there could be a commit between last commit and lastReadyForReviewEvent
+      const reviewedEventsAfterLastReadyForReviewEvent = timelineMapKeys.filter(
+        key =>
+          key.type === 'reviewed'
+          && key.timestamp > lastReadyForReviewEvent.timestamp
+          && key.timestamp < lastCommitEvent.timestamp
+      );
+      reviewedEventsAfterLastReadyForReviewEvent.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
+      const firstReviewedEventAfterLastReadyForReviewEvent = reviewedEventsAfterLastReadyForReviewEvent[0]
+
+      if (firstReviewedEventAfterLastReadyForReviewEvent) {
+        const temp = [...commitedEvents].reverse().find(
+          event => event.timestamp > lastReadyForReviewEvent.timestamp
+            && event.timestamp < firstReviewedEventAfterLastReadyForReviewEvent.timestamp
+
+        )?.timestamp || null;
+
+        if (temp) {
+          return temp;
+        }
+        return lastReadyForReviewEvent.timestamp;
       }
+      return lastReadyForReviewEvent.timestamp > lastCommitEvent.timestamp ? lastReadyForReviewEvent.timestamp : lastCommitEvent.timestamp;
     }

-  }
+    return null;
+  })();

-  // start review at
+  let firstReviewedEvent = null;
+  let reviewed = false;
+  let reviewDepth = 0;

-  const reviewEvents = timelineMapKeys.filter(({ type }) => type === 'note' || type === 'reviewed' || type === 'commented') as (TimelineMapKey & { type: 'note' | 'reviewed' | 'commented' })[];
-  let startedReviewAt: Date | null = null;
+  const noteEvents = timelineMapKeys.filter(key => key.type === 'note');
+  for (const noteEvent of noteEvents) {
+    const eventData = timelineMap.get(noteEvent) as MergeRequestNoteData | undefined;
+    if (!eventData) {
+      console.error('note event data not found', noteEvent);
+      continue;
+    }

-  if (reviewEvents.length > 0) {
-    for (const reviewEvent of reviewEvents) {
-      if (!startedReviewAt && reviewEvent.actorId !== extractMergeRequest.authorExternalId) {
-        startedReviewAt = reviewEvent.timestamp;
-      }
-      if (startedReviewAt && reviewEvent.timestamp.getTime() < startedReviewAt.getTime()) {
-        startedReviewAt = reviewEvent.timestamp;
-      }
+    const afterStartedPickupAt = startedPickupAt ? noteEvent.timestamp > startedPickupAt : true;
+    const beforeMergedEvent = mergedAt ? noteEvent.timestamp < mergedAt : true;
+    const isAuthorReviewer = eventData.authorExternalId === authorExternalId;
+    if (afterStartedPickupAt && beforeMergedEvent && !isAuthorReviewer) {
+      reviewDepth++;
     }
   }

-  // start pickup at
-
-  const convertToDraftEvents = timelineMapKeys.filter(({ type }) => type === 'convert_to_draft') as (TimelineMapKey & { type: 'convert_to_draft' })[];
-  let lastConvertToDraftBeforeReview: Date | null = null;
+  const reviewedEvents = timelineMapKeys.filter(key => key.type === 'reviewed' && key.timestamp < (mergedAt || new Date()));
+  for (const reviewedEvent of reviewedEvents) {
+    const eventData = timelineMap.get(reviewedEvent);
+    if (!eventData) {
+      console.error('reviewed event data not found', reviewedEvent);
+      continue;
+    }
+    const res = extract.ReviewedEventSchema.safeParse((eventData as TimelineEventData).data);
+    if (!res.success) {
+      console.error(res.error);
+      continue;
+    }
+    const isValidState = res.data.state === 'approved' || res.data.state === 'changes_requested' || res.data.state === 'commented';
+    const afterStartedPickupAt = startedPickupAt ? reviewedEvent.timestamp > startedPickupAt : true;
+    const beforeFirstReviewedEvent = firstReviewedEvent ? reviewedEvent.timestamp < firstReviewedEvent.timestamp : true;
+    const beforeMergedEvent = mergedAt ? reviewedEvent.timestamp < mergedAt : true;
+    const isAuthorReviewer = (eventData as TimelineEventData).actorId === authorExternalId;
+
+    if (isValidState && afterStartedPickupAt && beforeMergedEvent && !isAuthorReviewer) {
+      reviewed = true;
+      reviewDepth++;
+    }

-  for (const convertToDraft of convertToDraftEvents) {
-    if (
-      (!lastConvertToDraftBeforeReview || convertToDraft.timestamp.getTime() > lastConvertToDraftBeforeReview.getTime())
-      && (!startedReviewAt || convertToDraft.timestamp.getTime() < startedReviewAt.getTime())
-    ) {
-      lastConvertToDraftBeforeReview = convertToDraft.timestamp;
+    if (isValidState && afterStartedPickupAt && beforeFirstReviewedEvent && !isAuthorReviewer) {
+      reviewed = true;
+      firstReviewedEvent = reviewedEvent;
     }
   }

-  let startedPickupAt: Date | null = null;
-  const initialPickupEvents = timelineMapKeys.filter(({ type, timestamp }) => type === 'ready_for_review' || type === 'review_requested'
-    && (!lastConvertToDraftBeforeReview || timestamp.getTime() > lastConvertToDraftBeforeReview.getTime())
-    && (!startedReviewAt || timestamp.getTime() < startedReviewAt.getTime())) as (TimelineMapKey & { type: 'ready_for_review' | 'review_requested' })[];
+  return {
+    startedCodingAt,
+    startedPickupAt,
+    startedReviewAt: firstReviewedEvent ? firstReviewedEvent.timestamp : null,
+    mergedAt,
+    reviewed,
+    reviewDepth,
+  };
+}

-  for (const pickupEvent of initialPickupEvents) {
-    if (!startedPickupAt || pickupEvent.timestamp.getTime() < startedPickupAt.getTime()) {
-      startedPickupAt = pickupEvent.timestamp;
-    }
-  }

-  if (startedReviewAt && !startedPickupAt) {
-    for (const committedEvent of committedEvents) {
-      if (!startedPickupAt && committedEvent.timestamp.getTime() < startedReviewAt.getTime()) startedPickupAt = committedEvent.timestamp;
-      if (startedPickupAt
-        && committedEvent.timestamp.getTime() > startedPickupAt.getTime()
-        && committedEvent.timestamp.getTime() < startedReviewAt.getTime()) startedPickupAt = committedEvent.timestamp;
-    }
-  }

-  if (startedReviewAt && !startedPickupAt) {
-    startedPickupAt = extractMergeRequest.openedAt;
-  }
+function runTimeline(mergeRequestData: MergeRequestData, timelineEvents: TimelineEventData[], notes: MergeRequestNoteData[]) {
+  const timelineMap = setupTimeline(timelineEvents, notes);
+  const timelineMapKeys = [...timelineMap.keys()];
+
+  const { startedCodingAt, startedReviewAt, startedPickupAt, reviewed, reviewDepth } = calculateTimeline(
+    timelineMapKeys,
+    timelineMap,
+    {
+      authorExternalId: mergeRequestData.authorExternalId,
+    });

   // TODO: can this be optimized with the map ?
   const approved = timelineEvents.find(ev => ev.type === 'reviewed' && (JSON.parse(ev.data as string) as extract.ReviewedEvent).state === 'approved') !== undefined;
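Extracting `calculateTimeline` (and exporting it along with `TimelineMapKey`) lets the metric computation run without a database, which is presumably what the new tests in the other changed file rely on. A hypothetical usage sketch under those assumptions; the import path, event values, and loose value typing are illustrative, not taken from the commit's test file:

```ts
// Hypothetical driver for the exported function; values are invented for illustration.
// Assumes a sibling module importing from './merge-request-metrics'.
import { calculateTimeline, type TimelineMapKey } from './merge-request-metrics';

const committed: TimelineMapKey = { type: 'committed', timestamp: new Date('2023-10-01T09:00:00Z') };
const note: TimelineMapKey = { type: 'note', timestamp: new Date('2023-10-01T12:00:00Z') };
const merged: TimelineMapKey = { type: 'merged', timestamp: new Date('2023-10-02T10:00:00Z') };

// Values are typed loosely because the concrete extract.* field types are not shown in this diff.
const timelineMap = new Map<TimelineMapKey, any>([
  [committed, { type: 'committed', timestamp: committed.timestamp, actorId: 1, data: null }],
  [note, { type: 'note', timestamp: note.timestamp, authorExternalId: 2 }],
  [merged, { type: 'merged', timestamp: merged.timestamp, actorId: 1, data: null }],
]);

const result = calculateTimeline([...timelineMap.keys()], timelineMap, { authorExternalId: 1 });
console.log(result.startedCodingAt, result.mergedAt, result.reviewed, result.reviewDepth);
```

With this input, the committed event should yield `startedCodingAt`, the merged event `mergedAt`, and the note from a non-author a `reviewDepth` of 1.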
@@ -558,13 +616,14 @@ function runTimeline(extractMergeRequest: MergeRequestData, timelineEvents: Time
     startedCodingAt,
     startedReviewAt,
     startedPickupAt,
-    reviewed: startedReviewAt !== null,
+    reviewed,
+    reviewDepth,
     approved,
-    reviewDepth: reviewEvents.length,
-  };
+  }
 }


+
 export async function run(extractMergeRequestId: number, ctx: RunContext) {
   const extractData = await selectExtractData(ctx.extractDatabase, extractMergeRequestId);