Skip to content

Commit 6cb0a93

Browse files
committed
Merge branch 'main' into python-311-update
2 parents 7ea1ab0 + 336304c commit 6cb0a93

File tree

16 files changed

+104
-64
lines changed

16 files changed

+104
-64
lines changed

client/dive-common/apispec.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -168,7 +168,7 @@ interface Api {
168168
getTileURL?(itemId: string, x: number, y: number, level: number, query: Record<string, any>):
169169
string;
170170
importAnnotationFile(id: string, path: string, file?: File,
171-
additive?: boolean, additivePrepend?: string, set?: string): Promise<boolean>;
171+
additive?: boolean, additivePrepend?: string, set?: string): Promise<boolean | string[]>;
172172
}
173173
const ApiSymbol = Symbol('api');
174174

client/dive-common/components/ImportAnnotations.vue

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ export default defineComponent({
5252
if (!ret.canceled) {
5353
menuOpen.value = false;
5454
const path = ret.filePaths[0];
55-
let importFile = false;
55+
let importFile: boolean | string[] = false;
5656
processing.value = true;
5757
const set = currentSet.value === 'default' ? undefined : currentSet.value;
5858
if (ret.fileList?.length) {
@@ -74,6 +74,17 @@ export default defineComponent({
7474
set,
7575
);
7676
}
77+
console.log(importFile);
78+
if (Array.isArray(importFile) && importFile.length) {
79+
const text = ['There were warnings when importing. While the data imported properly please double check your annotations',
80+
'Below is a list of information that can help with debugging',
81+
].concat(importFile as string[]);
82+
await prompt({
83+
title: 'Import Warnings',
84+
text,
85+
positiveButton: 'OK',
86+
});
87+
}
7788
7889
if (importFile) {
7990
processing.value = false;

client/platform/desktop/backend/cli.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ function updater(update: DesktopJobUpdate) {
5151

5252
async function parseViameFile(file: string) {
5353
const data = await parseFile(file);
54-
stdout.write(JSON.stringify(data));
54+
stdout.write(JSON.stringify(data[0]));
5555
}
5656

5757
async function parseJsonFile(filepath: string, metapath: string) {

client/platform/desktop/backend/ipcService.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -95,6 +95,9 @@ export default function register() {
9595
id, path, additive, additivePrepend,
9696
}: { id: string; path: string; additive: boolean; additivePrepend: string }) => {
9797
const ret = await common.dataFileImport(settings.get(), id, path, additive, additivePrepend);
98+
console.log(ret.warnings);
99+
if (ret.warnings.length)
100+
return ret.warnings;
98101
return ret;
99102
});
100103

client/platform/desktop/backend/native/common.ts

Lines changed: 20 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -572,13 +572,14 @@ async function _ingestFilePath(
572572
imageMap?: Map<string, number>,
573573
additive = false,
574574
additivePrepend = '',
575-
): Promise<(DatasetMetaMutable & { fps?: number }) | null> {
575+
): Promise<[(DatasetMetaMutable & { fps?: number }), string[]] | null> {
576576
if (!fs.existsSync(path)) {
577577
return null;
578578
}
579579
if (fs.statSync(path).size === 0) {
580580
return null;
581581
}
582+
let warnings: string[] = [];
582583
// Make a copy of the file in aux
583584
const projectInfo = getProjectDir(settings, datasetId);
584585
const newPath = npath.join(projectInfo.auxDirAbsPath, `imported_${npath.basename(path)}`);
@@ -606,9 +607,10 @@ async function _ingestFilePath(
606607
} else if (CsvFileName.test(path)) {
607608
// VIAME CSV File
608609
const data = await viameSerializers.parseFile(path, imageMap);
609-
annotations.tracks = data.tracks;
610-
annotations.groups = data.groups;
611-
meta.fps = data.fps;
610+
annotations.tracks = data[0].tracks;
611+
annotations.groups = data[0].groups;
612+
meta.fps = data[0].fps;
613+
[, warnings] = data;
612614
} else if (YAMLFileName.test(path)) {
613615
annotations = await kpf.parse([path]);
614616
}
@@ -646,7 +648,7 @@ async function _ingestFilePath(
646648
await _saveSerialized(settings, datasetId, annotations, true);
647649
}
648650

649-
return meta;
651+
return [meta, warnings];
650652
}
651653

652654
/**
@@ -673,15 +675,20 @@ async function ingestDataFiles(
673675
): Promise<{
674676
processedFiles: string[];
675677
meta: DatasetMetaMutable & { fps?: number };
678+
warnings: string[];
676679
}> {
677680
const processedFiles = []; // which files were processed to generate the detections
678681
const meta = {};
679-
682+
let outwarnings: string[] = [];
680683
for (let i = 0; i < absPaths.length; i += 1) {
681684
const path = absPaths[i];
682685
// eslint-disable-next-line no-await-in-loop
683-
const newMeta = await _ingestFilePath(settings, datasetId, path, imageMap, additive, additivePrepend);
684-
if (newMeta !== null) {
686+
const results = await _ingestFilePath(
687+
settings, datasetId, path, imageMap, additive, additivePrepend,
688+
);
689+
if (results !== null) {
690+
const [newMeta, warnings] = results;
691+
outwarnings = warnings;
685692
merge(meta, newMeta);
686693
processedFiles.push(path);
687694
}
@@ -694,15 +701,17 @@ async function ingestDataFiles(
694701
const path = cameraAndPath[i][1];
695702
const cameraDatasetId = `${datasetId}/${cameraName}`;
696703
// eslint-disable-next-line no-await-in-loop
697-
const newMeta = await _ingestFilePath(settings, cameraDatasetId, path, imageMap);
698-
if (newMeta !== null) {
704+
const results = await _ingestFilePath(settings, cameraDatasetId, path, imageMap);
705+
if (results !== null) {
706+
const [newMeta, warnings] = results;
707+
outwarnings = outwarnings.concat(warnings);
699708
merge(meta, newMeta);
700709
processedFiles.push(path);
701710
}
702711
}
703712
}
704713

705-
return { processedFiles, meta };
714+
return { processedFiles, meta, warnings: outwarnings };
706715
}
707716
/**
708717
* Need to take the trained pipeline if it exists and place it in the DIVE_Pipelines folder

client/platform/desktop/backend/serializers/viame.spec.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -239,9 +239,9 @@ describe('VIAME Python Compatibility Check', () => {
239239
const trackArray = Object.values(trackData);
240240
// eslint-disable-next-line no-await-in-loop
241241
const results = await parse(csvStream);
242-
expect(Object.values(results.tracks)).toEqual(trackArray);
242+
expect(Object.values(results[0].tracks)).toEqual(trackArray);
243243
// eslint-disable-next-line no-await-in-loop
244-
const attData = processTrackAttributes(Object.values(results.tracks));
244+
const attData = processTrackAttributes(Object.values(results[0].tracks));
245245
expect(testAttributes).toEqual(attData.attributes);
246246
}
247247
});
@@ -313,7 +313,7 @@ describe('Test Image Filenames', () => {
313313
} else {
314314
// eslint-disable-next-line no-await-in-loop
315315
const result = await parseFile(testPath, imageMap);
316-
expect(Object.values(result.tracks).length).toBeGreaterThan(0);
316+
expect(Object.values(result[0].tracks).length).toBeGreaterThan(0);
317317
}
318318
}
319319
});

client/platform/desktop/backend/serializers/viame.ts

Lines changed: 17 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -241,7 +241,7 @@ function _parseFeature(row: string[]) {
241241
};
242242
}
243243

244-
async function parse(input: Readable, imageMap?: Map<string, number>): Promise<AnnotationFileData> {
244+
async function parse(input: Readable, imageMap?: Map<string, number>): Promise<[AnnotationFileData, string[]]> {
245245
const parser = csvparser({
246246
delimiter: ',',
247247
// comment lines may not have the correct number of columns
@@ -253,8 +253,9 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
253253
const foundImages: {image: string; frame: number; csvFrame: number}[] = [];
254254
let error: Error | undefined;
255255
let multiFrameTracks = false;
256+
const warnings: string[] = [];
256257

257-
return new Promise<AnnotationFileData>((resolve, reject) => {
258+
return new Promise<[AnnotationFileData, string[]]>((resolve, reject) => {
258259
pipeline([input, parser], (err) => {
259260
// undefined err indicates successful exit
260261
if (err !== undefined) {
@@ -280,15 +281,17 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
280281
}
281282
const k = i + 1;
282283
if (k < filteredImages.length) {
283-
if (filteredImages[i].csvFrame + 1 !== filteredImages[k].csvFrame || filteredImages[i].frame + 1 !== filteredImages[k].frame) {
284+
const itemDifference = foundImages[k].csvFrame - filteredImages[i].csvFrame;
285+
if (
286+
foundImages[i].csvFrame + itemDifference !== filteredImages[k].csvFrame || filteredImages[i].frame + itemDifference !== filteredImages[k].frame) {
284287
// We have misaligned image sequences so we error out
285-
error = new Error(`A subsampling of images were used with the CSV but they were not sequential\n
288+
warnings.push(`A subsampling of images were used with the CSV but they were not sequential\n
286289
${filteredImages[i].csvFrame + 1} !== ${filteredImages[k].csvFrame} || ${filteredImages[i].frame + 1} !== ${filteredImages[k].frame}\n
287290
image1: ${filteredImages[i].image} image2: ${filteredImages[k].image} - these should be sequential in the CSV
288291
\n`);
289292
}
290293
}
291-
frameMapper[filteredImages[i].csvFrame] = i;
294+
frameMapper[filteredImages[i].csvFrame] = filteredImages[i].frame;
292295
minFrame = Math.min(minFrame, filteredImages[i].csvFrame);
293296
maxFrame = Math.max(maxFrame, filteredImages[i].csvFrame);
294297
}
@@ -347,7 +350,9 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
347350
if (k < foundImages.length) {
348351
if (foundImages[i].csvFrame > foundImages[k].csvFrame || foundImages[i].frame > foundImages[k].frame) {
349352
// We have misaligned video sequences so we error out
350-
error = new Error('Images were provided in an unexpected order and dataset contains multi-frame tracks.');
353+
warnings.push(`Images were provided in an unexpected order and dataset contains multi-frame tracks.\n
354+
image${i}: frame: ${foundImages[i].frame} csvFrame: ${foundImages[i].csvFrame}
355+
image${k}: frame: ${foundImages[k].frame} csvFrame: ${foundImages[k].csvFrame}`);
351356
}
352357
}
353358
}
@@ -357,7 +362,7 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
357362
if (error !== undefined) {
358363
reject(error);
359364
}
360-
resolve({ tracks, groups: {}, fps });
365+
resolve([{ tracks, groups: {}, fps }, warnings]);
361366
});
362367
parser.on('readable', () => {
363368
let record: string[];
@@ -407,8 +412,10 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
407412
});
408413
if (rowInfo.frame < maxFeatureFrame) {
409414
// trackId was already in dataMap, and frame is out of order
410-
error = new Error(
411-
'annotations were provided in an unexpected order and dataset contains multi-frame tracks',
415+
warnings.push(
416+
`annotations were provided in an unexpected order and dataset contains multi-frame tracks:
417+
id: ${rowInfo.id} filename: ${rowInfo.filename} frame: ${rowInfo.frame}
418+
maxFeatureFrame: ${maxFeatureFrame}`,
412419
);
413420
// eslint-disable-next-line no-continue
414421
continue;
@@ -446,7 +453,7 @@ async function parse(input: Readable, imageMap?: Map<string, number>): Promise<A
446453
}
447454

448455
async function parseFile(path: string, imageMap?: Map<string, number>):
449-
Promise<AnnotationFileData> {
456+
Promise<[AnnotationFileData, string[]]> {
450457
const stream = fs.createReadStream(path);
451458
return parse(stream, imageMap);
452459
}

client/platform/desktop/frontend/api.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,7 @@ function importMultiCam(args: MultiCamImportArgs):
131131
}
132132

133133
// eslint-disable-next-line @typescript-eslint/no-unused-vars
134-
function importAnnotationFile(id: string, path: string, _htmlFile = undefined, additive = false, additivePrepend = ''): Promise<boolean> {
134+
function importAnnotationFile(id: string, path: string, _htmlFile = undefined, additive = false, additivePrepend = ''): Promise<boolean | string[]> {
135135
return ipcRenderer.invoke('import-annotation', {
136136
id, path, additive, additivePrepend,
137137
});

client/platform/web-girder/api/dataset.service.ts

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ function makeViameFolder({
8888
);
8989
}
9090

91-
async function importAnnotationFile(parentId: string, path: string, file?: HTMLFile, additive = false, additivePrepend = '', set: string | undefined = undefined) {
91+
async function importAnnotationFile(parentId: string, path: string, file?: HTMLFile, additive = false, additivePrepend = '', set: string | undefined = undefined): Promise<boolean | string[]> {
9292
if (file === undefined) {
9393
return false;
9494
}
@@ -111,6 +111,11 @@ async function importAnnotationFile(parentId: string, path: string, file?: HTMLF
111111
});
112112
if (uploadResponse.status === 200) {
113113
const final = await postProcess(parentId, true, false, additive, additivePrepend, set);
114+
if (final.data.length > 1) {
115+
const warnings = final.data[1];
116+
return warnings;
117+
}
118+
114119
return final.status === 200;
115120
}
116121
}

client/platform/web-girder/api/rpc.service.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
import girderRest from 'platform/web-girder/plugins/girder';
2+
import type { GirderModel } from '@girder/components/src';
23
import { Pipe } from 'dive-common/apispec';
34

45
function postProcess(folderId: string, skipJobs = false, skipTranscoding = false, additive = false, additivePrepend = '', set: string | undefined = undefined) {
5-
return girderRest.post(`dive_rpc/postprocess/${folderId}`, null, {
6+
return girderRest.post<[GirderModel, string[]]>(`dive_rpc/postprocess/${folderId}`, null, {
67
params: {
78
skipJobs, skipTranscoding, additive, additivePrepend, set,
89
},

client/platform/web-girder/views/Export.vue

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -318,10 +318,8 @@ export default defineComponent({
318318
block
319319
class="mt-2"
320320
:disabled="!exportUrls.exportDetectionsUrl"
321-
@click="doExport({
322-
url: exportUrls
323-
&& exportUrls.exportDetectionsUrlTrackJSON,
324-
})"
321+
@click="doExport({ url: exportUrls
322+
&& exportUrls.exportDetectionsUrlTrackJSON })"
325323
>
326324
<span
327325
v-if="exportUrls.exportDetectionsUrl"

server/dive_server/crud_rpc.py

Lines changed: 11 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -288,7 +288,7 @@ def run_training(
288288
def _get_data_by_type(
289289
file: types.GirderModel,
290290
image_map: Optional[Dict[str, int]] = None,
291-
) -> Optional[GetDataReturnType]:
291+
) -> Optional[Tuple[GetDataReturnType, Optional[List[str]]]]:
292292
"""
293293
Given an arbitrary Girder file model, figure out what kind of file it is and
294294
parse it appropriately.
@@ -325,10 +325,10 @@ def _get_data_by_type(
325325

326326
# Parse the file as the now known type
327327
if as_type == crud.FileType.VIAME_CSV:
328-
converted, attributes = viame.load_csv_as_tracks_and_attributes(
328+
converted, attributes, warnings = viame.load_csv_as_tracks_and_attributes(
329329
file_string.splitlines(), image_map
330330
)
331-
return {'annotations': converted, 'meta': None, 'attributes': attributes, 'type': as_type}
331+
return {'annotations': converted, 'meta': None, 'attributes': attributes, 'type': as_type}, warnings
332332
if as_type == crud.FileType.MEVA_KPF:
333333
converted, attributes = kpf.convert(kpf.load(file_string))
334334
return {'annotations': converted, 'meta': None, 'attributes': attributes, 'type': as_type}
@@ -374,6 +374,7 @@ def process_items(
374374
folder,
375375
user,
376376
)
377+
aggregate_warnings = []
377378
for item in unprocessed_items:
378379
file: Optional[types.GirderModel] = next(Item().childFiles(item), None)
379380
if file is None:
@@ -383,7 +384,9 @@ def process_items(
383384
image_map = None
384385
if fromMeta(folder, constants.TypeMarker) == 'image-sequence':
385386
image_map = crud.valid_image_names_dict(crud.valid_images(folder, user))
386-
results = _get_data_by_type(file, image_map=image_map)
387+
results, warnings = _get_data_by_type(file, image_map=image_map)
388+
if warnings:
389+
aggregate_warnings += warnings
387390
except Exception as e:
388391
Item().remove(item)
389392
raise RestException(f'{file["name"]} was not a supported file type: {e}') from e
@@ -414,7 +417,7 @@ def process_items(
414417
crud.saveImportAttributes(folder, results['attributes'], user)
415418
if results['meta']:
416419
crud_dataset.update_metadata(folder, results['meta'], False)
417-
420+
return aggregate_warnings
418421

419422
def postprocess(
420423
user: types.GirderUserModel,
@@ -424,7 +427,7 @@ def postprocess(
424427
additive=False,
425428
additivePrepend='',
426429
set='',
427-
) -> types.GirderModel:
430+
) -> Tuple[types.GirderModel, Optional[List[str]]]:
428431
"""
429432
Post-processing to be run after media/annotation import
430433
@@ -539,8 +542,8 @@ def postprocess(
539542

540543
Folder().save(dsFolder)
541544

542-
process_items(dsFolder, user, additive, additivePrepend, set)
543-
return dsFolder
545+
aggregate_warnings = process_items(dsFolder, user, additive, additivePrepend, set)
546+
return dsFolder, aggregate_warnings
544547

545548

546549
def convert_large_image(

0 commit comments

Comments (0)