Fixes - removing convlab

commit ff0b4af87c
parent 166dd4886c
@@ -11,9 +11,6 @@ monitor = DialogStateMonitor() # DSM
 dialog_policy = DialogPolicy() # DP
 language_generation = NaturalLanguageGeneration(templates) # NLG
 
-agent = PipelineAgent(nlu=nlu, dst=monitor, policy=None, nlg=language_generation, name='sys')
-resp = agent.response("Dzień dobry")
-print(resp)
 # Main loop
 dial_num = 0
 print("CTRL+C aby zakończyć program.")
@@ -35,8 +32,7 @@ while True:
     system_action = dialog_policy.predict(monitor)
 
     # NLG
-    act, slots = parse_frame(frame)
-    response = language_generation.generate(act, slots)
+    response = language_generation.generate(frame)
     print(response)
 
     if frame.act == "bye":
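Taken together, the two hunks above replace convlab's PipelineAgent with direct calls into the project's own modules. A minimal sketch of one dialogue turn under that wiring, reusing the objects built at the top of the script (nlu, monitor, dialog_policy, language_generation); the dialog-state update step is not visible in these hunks, so it is left as a placeholder:

text = input("> ")

frame = nlu.predict(text)                       # NLU: utterance -> Frame(act, slots)
# ... dialog-state update for `monitor` goes here (not shown in this diff)
system_action = dialog_policy.predict(monitor)  # DP: choose the next system action
response = language_generation.generate(frame)  # NLG: Frame -> natural-language reply
print(response)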
@@ -1,6 +1,6 @@
 from collections import defaultdict
 from model.frame import Frame
-from src.model.slot import Slot
+from model.slot import Slot
 
 class DialogPolicy:
 
@@ -1,4 +1,4 @@
-from src.model.frame import Frame
+from model.frame import Frame
 import copy
 import json
 
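The two one-line hunks above (the DialogPolicy and dialog-state-monitor modules) drop the src. prefix, so the flat model.* imports must resolve on their own. A hypothetical way to make that hold when launching a script from the repository root, assuming the packages sit under src/ as the old imports suggest (this setup is not part of the commit):

import sys
from pathlib import Path

# Put src/ on sys.path so that model.*, service.* and utils.* import without the src. prefix.
sys.path.insert(0, str(Path(__file__).resolve().parent / "src"))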
@@ -9,7 +9,7 @@ def normalize(value):
 
 
 class DialogStateMonitor:
-    def __init__(self, initial_state_file: str = '../attributes.json'):
+    def __init__(self, initial_state_file: str = 'attributes.json'):
         with open(initial_state_file) as file:
             constants = json.load(file)
         self.__initial_state = dict(belief_state={
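With the default changed from '../attributes.json' to 'attributes.json', DialogStateMonitor now resolves the file against the current working directory, so the program has to be started from the directory that contains attributes.json. A hypothetical CWD-independent alternative (file layout assumed, not taken from this diff) anchors the path to the module instead:

from pathlib import Path

# Hypothetical default: look for attributes.json next to the module that defines DialogStateMonitor.
DEFAULT_ATTRIBUTES = str(Path(__file__).resolve().parent / "attributes.json")

The constructor default would then read initial_state_file: str = DEFAULT_ATTRIBUTES.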
@@ -1,8 +1,7 @@
-from convlab.nlu.nlu import NLU
 from flair.models import SequenceTagger
 from utils.nlu_utils import predict_single, predict_and_annotate
 from model.frame import Frame, Slot
-
+import random
 """
 ACTS:
 inform/order
@@ -42,7 +41,7 @@ SLOTS:
     sauce
 """
 
-class NaturalLanguageUnderstanding(NLU):
+class NaturalLanguageUnderstanding():
     def __init__(self):
         print("\n========================================================")
         print("Models are loading, it may take a moment, please wait...")
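Dropping the convlab NLU base class means nothing enforces the component's interface any more. If that contract is worth keeping, a small structural Protocol (purely a suggestion, not part of this commit; the name NLUComponent is made up) could document what the rest of the pipeline expects from predict():

from typing import Protocol

from model.frame import Frame


class NLUComponent(Protocol):
    """Hypothetical stand-in for the removed convlab NLU base class."""

    def predict(self, text: str) -> Frame:
        """Map a single user utterance to a dialogue-act Frame."""
        ...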
@@ -86,8 +85,15 @@ class NaturalLanguageUnderstanding(NLU):
 
         return slots
 
-    def predict(self, text: str, context: list):
+    def predict(self, text: str):
         act = self.__predict_intention(text)
         slots = self.__predict_slot(text)
         frame = Frame(source = 'user', act = act, slots = slots)
+        # uncomment to quickly mock the response
+        # frames = [
+        #     Frame(source="user", act = "inform/order", slots=[Slot(name="pizza", value="barcelona")]),
+        #     Frame(source="user", act = "welcomemsg", slots=[]),
+        #     Frame(source="user", act = "request/menu", slots=[]),
+        # ]
+        # return random.choice(frames)
         return frame
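The context parameter is gone from predict(), and the commented-out block gives a quick way to mock the NLU without loading the flair taggers. Written out as a standalone stub (a sketch based on that commented code; mock_predict is a name invented here):

import random

from model.frame import Frame, Slot


def mock_predict(text: str) -> Frame:
    # Return a random canned Frame so the DP and NLG stages can be exercised
    # without loading the flair models.
    frames = [
        Frame(source="user", act="inform/order", slots=[Slot(name="pizza", value="barcelona")]),
        Frame(source="user", act="welcomemsg", slots=[]),
        Frame(source="user", act="request/menu", slots=[]),
    ]
    return random.choice(frames)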
@@ -1,26 +1,26 @@
 import re
 from service.template_selector import select_template
 import random
-from convlab.nlg.nlg import NLG
 # from service.templates import templates
 
 def parse_frame(frame):
     if not hasattr(frame, 'act') or not hasattr(frame, 'slots'):
         raise TypeError("Expected a Frame object with 'act' and 'slots' attributes.")
 
     act = frame.act
     slots = [{"name": slot.name, "value": slot.value} for slot in frame.slots]
 
     return act, slots
 
 
-class NaturalLanguageGeneration(NLG):
+class NaturalLanguageGeneration():
     def __init__(self, templates):
         self.templates = templates
 
-    def generate(self, act, slots):
+    def generate(self, frame):
+        act, slots = parse_frame(frame)
         template = select_template(act, slots)
         if template == "default/template":
             template = random.choice(self.templates["default/template"])
         slot_dict = {slot['name']: slot['value'] for slot in slots}
         return template.format(**slot_dict)
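After this hunk, generate() takes the whole Frame and recovers act and slots through the module-level parse_frame() helper, so callers (see the main-loop hunk above) no longer unpack the frame themselves. A short usage sketch; the template dictionary is invented for illustration, and the text actually returned depends on what select_template() picks:

from model.frame import Frame, Slot

templates = {"default/template": ["Added {pizza} to the order."]}  # hypothetical content
nlg = NaturalLanguageGeneration(templates)

frame = Frame(source="user", act="inform/order", slots=[Slot(name="pizza", value="barcelona")])
print(nlg.generate(frame))  # parse_frame() extracts act/slots, select_template() chooses the text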