Fixes - removing convlab
parent 166dd4886c
commit ff0b4af87c
@@ -11,9 +11,6 @@ monitor = DialogStateMonitor() # DSM
 dialog_policy = DialogPolicy() # DP
 language_generation = NaturalLanguageGeneration(templates) # NLG

-agent = PipelineAgent(nlu=nlu, dst=monitor, policy=None, nlg=language_generation, name='sys')
-resp = agent.response("Dzień dobry")
-print(resp)
 # Main loop
 dial_num = 0
 print("CTRL+C aby zakończyć program.")
@@ -35,8 +32,7 @@ while True:
     system_action = dialog_policy.predict(monitor)

     # NLG
-    act, slots = parse_frame(frame)
-    response = language_generation.generate(act, slots)
+    response = language_generation.generate(frame)
     print(response)

     if frame.act == "bye":
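Note: with convlab's PipelineAgent removed, the components above are wired together by hand in the main script. A minimal sketch of the resulting loop, using only the identifiers visible in this diff; the input() handling, the monitor.update() call, and the loading of `templates` are assumptions, not part of the commit:

# Sketch only: assumes the project classes (NLU, DSM, DP, NLG) are imported and
# `templates` is loaded as elsewhere in the repository.
nlu = NaturalLanguageUnderstanding()                         # NLU: text -> Frame
monitor = DialogStateMonitor()                               # DSM
dialog_policy = DialogPolicy()                               # DP
language_generation = NaturalLanguageGeneration(templates)   # NLG

print("CTRL+C aby zakończyć program.")
while True:
    text = input("> ")                                       # assumed input handling
    frame = nlu.predict(text)                                # no `context` argument any more
    monitor.update(frame)                                    # assumed DSM update method
    system_action = dialog_policy.predict(monitor)           # DP reads the monitor
    response = language_generation.generate(frame)           # mirrors the diff: NLG is fed the NLU frame
    print(response)
    if frame.act == "bye":
        break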
@@ -1,6 +1,6 @@
 from collections import defaultdict
 from model.frame import Frame
-from src.model.slot import Slot
+from model.slot import Slot

 class DialogPolicy:

@@ -1,4 +1,4 @@
-from src.model.frame import Frame
+from model.frame import Frame
 import copy
 import json

@@ -9,7 +9,7 @@ def normalize(value):


 class DialogStateMonitor:
-    def __init__(self, initial_state_file: str = '../attributes.json'):
+    def __init__(self, initial_state_file: str = 'attributes.json'):
         with open(initial_state_file) as file:
             constants = json.load(file)
         self.__initial_state = dict(belief_state={
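The new default path suggests the monitor is now constructed with the working directory at the project root, where attributes.json lives. A hypothetical, more location-independent alternative (not what this commit does) would resolve the file relative to the module itself:

import json
from pathlib import Path

# Hypothetical default: attributes.json resolved relative to this file instead of
# the current working directory.
DEFAULT_ATTRIBUTES = Path(__file__).resolve().parent / "attributes.json"

class DialogStateMonitor:
    def __init__(self, initial_state_file: str = str(DEFAULT_ATTRIBUTES)):
        with open(initial_state_file) as file:
            constants = json.load(file)
        # ... build the initial belief state from `constants`, as in the real class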
@@ -1,8 +1,7 @@
-from convlab.nlu.nlu import NLU
 from flair.models import SequenceTagger
 from utils.nlu_utils import predict_single, predict_and_annotate
 from model.frame import Frame, Slot

 import random
 """
 ACTS:
 inform/order
@@ -42,7 +41,7 @@ SLOTS:
 sauce
 """

-class NaturalLanguageUnderstanding(NLU):
+class NaturalLanguageUnderstanding():
     def __init__(self):
         print("\n========================================================")
         print("Models are loading, it may take a moment, please wait...")
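Dropping the convlab NLU/NLG base classes leaves the two components duck-typed. If an explicit contract is still wanted without the convlab dependency, a lightweight local interface could look like this; purely hypothetical, not part of the commit:

from typing import Protocol
from model.frame import Frame

# Hypothetical structural interfaces matching the signatures used in this diff.
class NLUInterface(Protocol):
    def predict(self, text: str) -> Frame: ...

class NLGInterface(Protocol):
    def generate(self, frame: Frame) -> str: ...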
@@ -86,8 +85,15 @@ class NaturalLanguageUnderstanding(NLU):

         return slots

-    def predict(self, text: str, context: list):
+    def predict(self, text: str):
         act = self.__predict_intention(text)
         slots = self.__predict_slot(text)
         frame = Frame(source = 'user', act = act, slots = slots)
+        # uncomment to quickly mock the response
+        # frames = [
+        #     Frame(source="user", act = "inform/order", slots=[Slot(name="pizza", value="barcelona")]),
+        #     Frame(source="user", act = "welcomemsg", slots=[]),
+        #     Frame(source="user", act = "request/menu", slots=[]),
+        # ]
+        # return random.choice(frames)
         return frame
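After this change the NLU is called with the raw utterance only; the convlab-style context list is gone. A minimal usage sketch; the example utterance is hypothetical, and the Slot attribute names follow the mocked frames above:

nlu = NaturalLanguageUnderstanding()
frame = nlu.predict("poproszę pizzę barcelona")   # hypothetical user utterance
print(frame.act)                                  # e.g. "inform/order"
for slot in frame.slots:
    print(slot.name, slot.value)                  # Slot(name=..., value=...) as in the mock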
@@ -1,7 +1,6 @@
 import re
 from service.template_selector import select_template
 import random
-from convlab.nlg.nlg import NLG

 # from service.templates import templates

@@ -14,11 +13,12 @@ def parse_frame(frame):

     return act, slots

-class NaturalLanguageGeneration(NLG):
+class NaturalLanguageGeneration():
     def __init__(self, templates):
         self.templates = templates

-    def generate(self, act, slots):
+    def generate(self, frame):
+        act, slots = parse_frame(frame)
         template = select_template(act, slots)
         if template == "default/template":
             template = random.choice(self.templates["default/template"])
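With the whole frame passed in, generate() now unpacks it internally via parse_frame. How the method reads after this hunk; the template filling and the return value are not visible in the diff and are only sketched:

    def generate(self, frame):
        act, slots = parse_frame(frame)
        template = select_template(act, slots)
        if template == "default/template":
            template = random.choice(self.templates["default/template"])
        # ... fill the selected template with slot values (not shown in this hunk)
        return template  # assumed return, not part of the visible diff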