Add helper function to options

This commit is contained in:
Filip Gralinski 2019-09-23 18:31:52 +02:00
parent 00addc5620
commit fd12a55bcd
3 changed files with 11 additions and 8 deletions

View File

@ -1,5 +1,5 @@
 name:                geval
-version:             1.21.0.0
+version:             1.21.1.0
 synopsis:            Machine learning evaluation tools
 description:         Please see README.md
 homepage:            http://github.com/name/project

View File

@ -9,7 +9,8 @@ module GEval.MetricsMeta
            getEvaluationSchemeDescription,
            outContents,
            expectedScore,
-           allMetricsDescription)
+           allMetricsDescription,
+           helpMetricParameterMetricsList)
        where

 import GEval.Common
@ -146,6 +147,12 @@ expectedScore (EvaluationScheme (ProbabilisticMultiLabelFMeasure beta) [])
       recall = 0.675
   in weightedHarmonicMean beta precision recall
-- | Comma-separated listing of every available evaluation scheme,
-- each followed by its extra info (if any) in parentheses; intended
-- for the --metric option's help text.
helpMetricParameterMetricsList :: String
helpMetricParameterMetricsList =
  intercalate ", " (map describeScheme listOfAvailableEvaluationSchemes)
  where describeScheme scheme =
          show scheme ++ maybe "" (\info -> " (" ++ info ++ ")") (extraInfo scheme)
 listOfAvailableEvaluationSchemes :: [EvaluationScheme]
 listOfAvailableEvaluationSchemes = map (\m -> EvaluationScheme m []) listOfAvailableMetrics
                                    ++ [

View File

@ -27,7 +27,7 @@ import Data.Monoid ((<>))
 import GEval.Core
 import GEval.EvaluationScheme
-import GEval.MetricsMeta (extraInfo, listOfAvailableEvaluationSchemes, allMetricsDescription)
+import GEval.MetricsMeta (extraInfo, listOfAvailableEvaluationSchemes, allMetricsDescription, helpMetricParameterMetricsList)
 import GEval.Common
 import GEval.CreateChallenge
 import GEval.LineByLine
@ -247,11 +247,7 @@ metricReader = many $ option auto -- actually `some` should be used inst
                 ( long "metric" -- --metric might be in the config.txt file...
                   <> short 'm'
                   <> metavar "METRIC"
-                  <> help ("Metric to be used, e.g.:" ++ intercalate ", " (map
-                          (\s -> (show s) ++ (case extraInfo s of
-                                                Just eI -> " (" ++ eI ++ ")"
-                                                Nothing -> ""))
-                          listOfAvailableEvaluationSchemes)))
+                  <> help ("Metric to be used, e.g.:" ++ helpMetricParameterMetricsList))
 -- RMSE, MSE, MAE, SMAPE, Pearson, Spearman, Accuracy, LogLoss, Likelihood, F-measure (specify as F1, F2, F0.25, etc.), macro F-measure (specify as Macro-F1, Macro-F2, Macro-F0.25, etc.), multi-label F-measure (specify as MultiLabel-F1, MultiLabel-F2, MultiLabel-F0.25, etc.), MultiLabel-Likelihood, MAP, BLEU, GLEU (\"Google GLEU\" not the grammar correction metric), WER, NMI, ClippEU, LogLossHashed, LikelihoodHashed, BIO-F1, BIO-F1-Labels, TokenAccuracy, soft F-measure (specify as Soft-F1, Soft-F2, Soft-F0.25), probabilistic soft F-measure (specify as Probabilistic-Soft-F1, Probabilistic-Soft-F2, Probabilistic-Soft-F0.25) or CharMatch" )