paper-cutter/{{cookiecutter.paper_repo_name}}/bibliography.bib

% The bibliographic entry for the paper itself should be given here;
% it is used for generating declarations.
@InProceedings{this-paper,
author="{{cookiecutter.paper_title}}",
title="{{cookiecutter.main_contributor_name}}",
year=2021
}
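% For illustration only: a hypothetical rendering of the entry above once the
% cookiecutter variables are filled in (the author, title, and year values
% shown below are placeholders, not real metadata):
%
% @InProceedings{this-paper,
% author="Jane Doe",
% title="An Example Paper Title",
% year=2021
% }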
{% raw %}
@inproceedings{gralinski-etal-2019-geval,
title = "{GE}val: Tool for Debugging {NLP} Datasets and Models",
author = "Grali{\'n}ski, Filip and
Wr{\'o}blewska, Anna and
Stanis{\l}awek, Tomasz and
Grabowski, Kamil and
G{\'o}recki, Tomasz",
booktitle = "Proceedings of the 2019 ACL Workshop BlackboxNLP: Analyzing and Interpreting Neural Networks for NLP",
month = aug,
year = "2019",
address = "Florence, Italy",
publisher = "Association for Computational Linguistics",
url = "https://www.aclweb.org/anthology/W19-4826",
pages = "254--262",
abstract = "This paper presents a simple but general and effective method to debug the output of machine learning (ML) supervised models, including neural networks. The algorithm looks for features that lower the evaluation metric in such a way that it cannot be ascribed to chance (as measured by their p-values). Using this method {--} implemented as MLEval tool {--} you can find: (1) anomalies in test sets, (2) issues in preprocessing, (3) problems in the ML model itself. It can give you an insight into what can be improved in the datasets and/or the model. The same method can be used to compare ML models or different versions of the same model. We present the tool, the theory behind it and use cases for text-based models of various types.",
}
@incollection{gonito2016,
title = "Gonito.net -- Open Platform for Research Competition, Cooperation and Reproducibility",
author = "Grali{\'n}ski, Filip and
Jaworski, Rafa{\l} and
Borchmann, {\L}ukasz and
Wierzcho{\'n}, Piotr",
editor = "Branco, António and
Calzolari, Nicoletta and
Choukri, Khalid",
booktitle = "Proceedings of the 4REAL Workshop: Workshop on Research Results Reproducibility and Resources Citation in Science and Technology of Language",
year = "2016",
pages = "13--20"
}
{% endraw %}