Fixes - removing convlab

s495727 2024-06-10 17:37:59 +02:00
parent 166dd4886c
commit ff0b4af87c
5 changed files with 39 additions and 37 deletions

View File

@@ -11,9 +11,6 @@ monitor = DialogStateMonitor() # DSM
dialog_policy = DialogPolicy() # DP
language_generation = NaturalLanguageGeneration(templates) # NLG
-agent = PipelineAgent(nlu=nlu, dst=monitor, policy=None, nlg=language_generation, name='sys')
-resp = agent.response("Dzień dobry")
-print(resp)
# Main loop
dial_num = 0
print("CTRL+C aby zakończyć program.")
@@ -35,8 +32,7 @@ while True:
system_action = dialog_policy.predict(monitor)
# NLG
-act, slots = parse_frame(frame)
-response = language_generation.generate(act, slots)
+response = language_generation.generate(frame)
print(response)
if frame.act == "bye":
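
Note: with convlab's PipelineAgent gone, the main script has to drive the modules itself. A minimal sketch of the resulting loop, assuming the names visible in this diff; the console input and the DST update call are assumptions, not part of the commit:

    # Sketch only: wiring NLU -> DST -> DP -> NLG by hand instead of convlab's PipelineAgent
    nlu = NaturalLanguageUnderstanding()
    monitor = DialogStateMonitor()                              # DSM, reads attributes.json
    dialog_policy = DialogPolicy()                              # DP
    language_generation = NaturalLanguageGeneration(templates)  # NLG; templates loaded elsewhere

    while True:
        text = input("> ")                                      # assumed console input
        frame = nlu.predict(text)                               # NLU -> Frame(act, slots)
        monitor.update(frame)                                   # assumed DST method name
        system_action = dialog_policy.predict(monitor)          # DP, as in the loop above
        response = language_generation.generate(frame)          # NLG fills a template from the frame
        print(response)
        if frame.act == "bye":
            break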

View File

@@ -1,6 +1,6 @@
from collections import defaultdict
from model.frame import Frame
-from src.model.slot import Slot
+from model.slot import Slot
class DialogPolicy:

View File

@@ -1,4 +1,4 @@
-from src.model.frame import Frame
+from model.frame import Frame
import copy
import json
@@ -9,7 +9,7 @@ def normalize(value):
class DialogStateMonitor:
-def __init__(self, initial_state_file: str = '../attributes.json'):
+def __init__(self, initial_state_file: str = 'attributes.json'):
with open(initial_state_file) as file:
constants = json.load(file)
self.__initial_state = dict(belief_state={
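
Note: the default initial-state path changes from '../attributes.json' to 'attributes.json', so it is now resolved against the directory the program is started from rather than its parent. A small usage sketch under that assumption:

    # Assumes attributes.json sits in the current working directory
    monitor = DialogStateMonitor()                            # new default path
    # monitor = DialogStateMonitor('config/attributes.json')  # hypothetical explicit path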

View File

@@ -1,8 +1,7 @@
-from convlab.nlu.nlu import NLU
from flair.models import SequenceTagger
from utils.nlu_utils import predict_single, predict_and_annotate
from model.frame import Frame, Slot
import random
"""
ACTS:
inform/order
@@ -42,7 +41,7 @@ SLOTS:
sauce
"""
-class NaturalLanguageUnderstanding(NLU):
+class NaturalLanguageUnderstanding():
def __init__(self):
print("\n========================================================")
print("Models are loading, it may take a moment, please wait...")
@@ -86,8 +85,15 @@ class NaturalLanguageUnderstanding(NLU):
return slots
-def predict(self, text: str, context: list):
+def predict(self, text: str):
act = self.__predict_intention(text)
slots = self.__predict_slot(text)
frame = Frame(source = 'user', act = act, slots = slots)
+# uncomment to quickly mock the response
+# frames = [
+# Frame(source="user", act = "inform/order", slots=[Slot(name="pizza", value="barcelona")]),
+# Frame(source="user", act = "welcomemsg", slots=[]),
+# Frame(source="user", act = "request/menu", slots=[]),
+# ]
+# return random.choice(frames)
return frame
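
Note: with the convlab NLU base class dropped, predict takes only the raw utterance and returns a Frame. A usage sketch, assuming the constructor above; the example utterance is made up:

    nlu = NaturalLanguageUnderstanding()                       # loads the flair SequenceTagger models
    frame = nlu.predict("poproszę pizzę barcelona")            # hypothetical user utterance
    print(frame.act)                                           # e.g. 'inform/order'
    print([(slot.name, slot.value) for slot in frame.slots])   # recognised slots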

View File

@@ -1,26 +1,26 @@
-import re
-from service.template_selector import select_template
-import random
-from convlab.nlg.nlg import NLG
-# from service.templates import templates
-def parse_frame(frame):
-if not hasattr(frame, 'act') or not hasattr(frame, 'slots'):
-raise TypeError("Expected a Frame object with 'act' and 'slots' attributes.")
-act = frame.act
-slots = [{"name": slot.name, "value": slot.value} for slot in frame.slots]
-return act, slots
-class NaturalLanguageGeneration(NLG):
-def __init__(self, templates):
-self.templates = templates
-def generate(self, act, slots):
-template = select_template(act, slots)
-if template == "default/template":
-template = random.choice(self.templates["default/template"])
-slot_dict = {slot['name']: slot['value'] for slot in slots}
+import re
+from service.template_selector import select_template
+import random
+# from service.templates import templates
+def parse_frame(frame):
+if not hasattr(frame, 'act') or not hasattr(frame, 'slots'):
+raise TypeError("Expected a Frame object with 'act' and 'slots' attributes.")
+act = frame.act
+slots = [{"name": slot.name, "value": slot.value} for slot in frame.slots]
+return act, slots
+class NaturalLanguageGeneration():
+def __init__(self, templates):
+self.templates = templates
+def generate(self, frame):
+act, slots = parse_frame(frame)
+template = select_template(act, slots)
+if template == "default/template":
+template = random.choice(self.templates["default/template"])
+slot_dict = {slot['name']: slot['value'] for slot in slots}
+return template.format(**slot_dict)
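
Note: generate now receives the whole Frame and calls parse_frame itself, which matches the main-loop change in the first file. A usage sketch, assuming Frame and Slot as imported elsewhere in this diff and a templates dict loaded beforehand:

    nlg = NaturalLanguageGeneration(templates)                # templates assumed loaded elsewhere
    frame = Frame(source="user", act="inform/order",
                  slots=[Slot(name="pizza", value="barcelona")])
    print(nlg.generate(frame))                                # selected template formatted with {'pizza': 'barcelona'}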