Adds an option to mock the NLU
parent ff0b4af87c
commit f4d9bff809
@@ -1,12 +1,12 @@
 from service.dialog_state_monitor import DialogStateMonitor
 from service.dialog_policy import DialogPolicy
 from service.natural_languag_understanding import NaturalLanguageUnderstanding
-from service.natural_language_generation import NaturalLanguageGeneration, parse_frame
+from service.natural_language_generation import NaturalLanguageGeneration
 from service.templates import templates
-from convlab.dialog_agent import PipelineAgent
+
 
 # initialize classes
-nlu = NaturalLanguageUnderstanding() # NLU
+nlu = NaturalLanguageUnderstanding(use_mocks=False) # NLU
 monitor = DialogStateMonitor() # DSM
 dialog_policy = DialogPolicy() # DP
 language_generation = NaturalLanguageGeneration(templates) # NLG
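The bootstrap above hard-codes `use_mocks=False`. A minimal sketch of driving the flag from an environment variable instead, assuming a variable name `MOCK_NLU` that is not part of this commit:

```python
# Hypothetical alternative to the hard-coded flag: read it from the environment.
# MOCK_NLU is an assumed variable name, not introduced by this commit.
import os

from service.natural_languag_understanding import NaturalLanguageUnderstanding

nlu = NaturalLanguageUnderstanding(use_mocks=os.getenv("MOCK_NLU", "0") == "1")
```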
@@ -1,4 +1,3 @@
-from flair.models import SequenceTagger
 from utils.nlu_utils import predict_single, predict_and_annotate
 from model.frame import Frame, Slot
 import random
@@ -42,7 +41,12 @@ SLOTS:
 """
 
 class NaturalLanguageUnderstanding():
-    def __init__(self):
+    def __init__(self, use_mocks=False):
+        self.use_mocks = use_mocks
+        if use_mocks:
+            return
+
+        from flair.models import SequenceTagger
         print("\n========================================================")
         print("Models are loading, it may take a moment, please wait...")
         print("========================================================\n")
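For reference, a minimal usage sketch of the new constructor flag, assuming the module path shown in the imports above. With `use_mocks=True` the early return skips the lazy flair import entirely, so mock mode can run on a machine without flair installed:

```python
from service.natural_languag_understanding import NaturalLanguageUnderstanding

# Mock mode: no SequenceTagger models are downloaded or loaded.
nlu_mock = NaturalLanguageUnderstanding(use_mocks=True)

# Real mode: flair is imported lazily inside __init__ and the taggers are loaded.
nlu_real = NaturalLanguageUnderstanding(use_mocks=False)
```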
@@ -86,14 +90,16 @@ class NaturalLanguageUnderstanding():
         return slots
 
     def predict(self, text: str):
-        act = self.__predict_intention(text)
-        slots = self.__predict_slot(text)
-        frame = Frame(source = 'user', act = act, slots = slots)
-        # uncomment to quickly mock the response
-        # frames = [
-        #     Frame(source="user", act = "inform/order", slots=[Slot(name="pizza", value="barcelona")]),
-        #     Frame(source="user", act = "welcomemsg", slots=[]),
-        #     Frame(source="user", act = "request/menu", slots=[]),
-        # ]
-        # return random.choice(frames)
-        return frame
+        if not self.use_mocks:
+            act = self.__predict_intention(text)
+            slots = self.__predict_slot(text)
+            frame = Frame(source = 'user', act = act, slots = slots)
+            return frame
+        else:
+            frames = [
+                Frame(source="user", act = "inform/order", slots=[Slot(name="pizza", value="barcelona")]),
+                Frame(source="user", act = "welcomemsg", slots=[]),
+                Frame(source="user", act = "request/menu", slots=[]),
+            ]
+            return random.choice(frames)
+
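A possible way to exercise the mock branch of `predict` in a unit test. This is only a sketch: the test file and function names are hypothetical, and it assumes the canned frames stay as listed in the hunk above and that `Frame` exposes its constructor arguments as attributes:

```python
# test_nlu_mock.py (hypothetical) -- checks that mock mode returns one of the canned frames.
from service.natural_languag_understanding import NaturalLanguageUnderstanding


def test_predict_returns_canned_frame_in_mock_mode():
    nlu = NaturalLanguageUnderstanding(use_mocks=True)
    frame = nlu.predict("any text")  # the input text is ignored in mock mode
    assert frame.source == "user"
    assert frame.act in {"inform/order", "welcomemsg", "request/menu"}
```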