@unpublished{WilmFragosoGarciaBertrametal.2022,
  author    = {Wilm, Frauke and Fragoso-Garcia, Marco and Bertram, Christof and Stathonikos, Nikolas and {\"O}ttl, Mathias and Qiu, Jingna and Klopfleisch, Robert and Maier, Andreas and Aubreville, Marc and Breininger, Katharina},
  title     = {Mind the Gap: Scanner-induced domain shifts pose challenges for representation learning in histopathology},
  publisher = {arXiv},
  address   = {Ithaca},
  doi       = {10.48550/arXiv.2211.16141},
  year      = {2022},
  language  = {en}
}

@article{WilmFragosoGarciaMarzahletal.2022,
  author    = {Wilm, Frauke and Fragoso-Garcia, Marco and Marzahl, Christian and Qiu, Jingna and Puget, Chlo{\'e} and Diehl, Laura and Bertram, Christof and Klopfleisch, Robert and Maier, Andreas and Breininger, Katharina and Aubreville, Marc},
  title     = {Pan-tumor CAnine cuTaneous Cancer Histology (CATCH) dataset},
  volume    = {9},
  pages     = {588},
  journal   = {Scientific Data},
  publisher = {Springer Nature},
  address   = {New York},
  issn      = {2052-4463},
  doi       = {10.1038/s41597-022-01692-w},
  year      = {2022},
  abstract  = {Due to morphological similarities, the differentiation of histologic sections of cutaneous tumors into individual subtypes can be challenging. Recently, deep learning-based approaches have proven their potential for supporting pathologists in this regard. However, many of these supervised algorithms require a large amount of annotated data for robust development. We present a publicly available dataset of 350 whole slide images of seven different canine cutaneous tumors complemented by 12,424 polygon annotations for 13 histologic classes, including seven cutaneous tumor subtypes. In inter-rater experiments, we show a high consistency of the provided labels, especially for tumor annotations. We further validate the dataset by training a deep neural network for the task of tissue segmentation and tumor subtype classification. We achieve a class-averaged Jaccard coefficient of 0.7047, and 0.9044 for tumor in particular. For classification, we achieve a slide-level accuracy of 0.9857. Since canine cutaneous tumors possess various histologic homologies to human tumors, the added value of this dataset is not limited to veterinary pathology but extends to more general fields of application.},
  language  = {en}
}

@inproceedings{QiuWilmOettletal.2023,
  author    = {Qiu, Jingna and Wilm, Frauke and {\"O}ttl, Mathias and Schlereth, Maja and Liu, Chang and Heimann, Tobias and Aubreville, Marc and Breininger, Katharina},
  title     = {Adaptive Region Selection for Active Learning in Whole Slide Image Semantic Segmentation},
  booktitle = {Medical Image Computing and Computer Assisted Intervention - MICCAI 2023: Proceedings, Part II},
  editor    = {Greenspan, Hayit and Madabhushi, Anant and Mousavi, Parvin and Salcudean, Septimiu and Duncan, James and Syeda-Mahmood, Tanveer and Taylor, Russell},
  publisher = {Springer},
  address   = {Cham},
  isbn      = {978-3-031-43895-0},
  issn      = {1611-3349},
  doi       = {10.1007/978-3-031-43895-0_9},
  pages     = {90--100},
  year      = {2023},
  language  = {en}
}

@inproceedings{WilmFragosoGarciaBertrametal.2023,
  author    = {Wilm, Frauke and Fragoso-Garcia, Marco and Bertram, Christof and Stathonikos, Nikolas and {\"O}ttl, Mathias and Qiu, Jingna and Klopfleisch, Robert and Maier, Andreas and Aubreville, Marc and Breininger, Katharina},
  title     = {Mind the Gap: Scanner-Induced Domain Shifts Pose Challenges for Representation Learning in Histopathology},
  booktitle = {2023 IEEE 20th International Symposium on Biomedical Imaging (ISBI)},
  publisher = {IEEE},
  address   = {Piscataway},
  isbn      = {978-1-6654-7358-3},
  doi       = {10.1109/ISBI53787.2023.10230458},
  year      = {2023},
  language  = {en}
}

@unpublished{QiuAubrevilleWilmetal.2024,
  author    = {Qiu, Jingna and Aubreville, Marc and Wilm, Frauke and {\"O}ttl, Mathias and Utz, Jonas and Schlereth, Maja and Breininger, Katharina},
  title     = {Leveraging Image Captions for Selective Whole Slide Image Annotation},
  publisher = {arXiv},
  address   = {Ithaca},
  doi       = {10.48550/arXiv.2407.06363},
  year      = {2024},
  language  = {en}
}