diff --git a/database_talker/.gitignore b/database_talker/.gitignore
new file mode 100644
index 0000000000..d584a46726
--- /dev/null
+++ b/database_talker/.gitignore
@@ -0,0 +1,3 @@
+auth/*
+test_db/*
+.vscode/*
diff --git a/database_talker/CMakeLists.txt b/database_talker/CMakeLists.txt
new file mode 100644
index 0000000000..2fd8ff2bb2
--- /dev/null
+++ b/database_talker/CMakeLists.txt
@@ -0,0 +1,21 @@
+cmake_minimum_required(VERSION 3.0.2)
+project(database_talker)
+
+find_package(catkin REQUIRED COMPONENTS catkin_virtualenv)
+
+## This macro ensures modules and global scripts declared therein get installed
+catkin_python_setup()
+
+catkin_generate_virtualenv(
+ PYTHON_INTERPRETER python3
+ CHECK_VENV FALSE
+ USE_SYSTEM_PACKAGES FALSE # Default TRUE
+)
+
+catkin_package(
+)
+
+catkin_install_python(PROGRAMS
+ sample/include/store_image_description.py scripts/make_diary.py
+ DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+)
diff --git a/database_talker/README.md b/database_talker/README.md
new file mode 100644
index 0000000000..13155279ec
--- /dev/null
+++ b/database_talker/README.md
@@ -0,0 +1,44 @@
+# database talker
+
+## What is this?
+
+This package provides sample code that generates a response or a diary entry from a robot's experiences stored in MongoDB.
+
+## How to setup
+
+Set up a workspace using the `rosinstall` file and compile it with `catkin build database_talker`.
+
+## How to use
+
+For a minimal setup, run the following command. It starts the mongodb/lifelog nodes and saves USB camera data to the database.
+```bash
+roslaunch database_talker sample.launch
+```
+
+To generate a diary from the robot's memory, execute the following command and talk to the Google Chat bot.
+
+```bash
+rosrun database_talker make_diary.py --prompt-type personality
+```
+
+## Tips
+
+### How to test with data from a specific date, without Google Chat
+```bash
+rosrun database_talker make_diary.py --test --prompt-type personality --date 2023-03-20
+```
+
+### Stop using external DBs (recommended during the debug phase)
+
+Empty the `mongodb_store_extras` list in `jsk_robot_startup/lifelog/mongodb_replication_params.yaml`:
+```diff
+-mongodb_store_extras: [["robot-database.jsk.imi.i.u-tokyo.ac.jp", 27017],["musca.jsk.imi.i.u-tokyo.ac.jp",27017]]
++mongodb_store_extras: []
+```
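+
+To inspect what is being stored without going through ROS, you can query the database directly with `pymongo` (already pinned in `requirements.txt`). This is a minimal sketch, assuming the database listens on `localhost:27017`; the database name `jsk_robot_lifelog` and the `_meta` fields follow what the scripts in this package use, while `fetch` is a hypothetical robot name, so adjust host, port and names to your setup:
+
+```python
+import datetime
+
+import pymongo
+
+client = pymongo.MongoClient('localhost', 27017)  # assumed host/port
+collection = client['jsk_robot_lifelog']['fetch']  # collection name comes from the 'robot/name' param; 'fetch' is hypothetical
+
+# show what was stored during the last 24 hours
+since = datetime.datetime.utcnow() - datetime.timedelta(hours=24)
+for doc in collection.find({'_meta.inserted_at': {'$gt': since}}).limit(10):
+    meta = doc['_meta']
+    print(meta['stored_type'], meta.get('input_topic'), meta['inserted_at'])
+```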
+
+### Force store image
+
+An image is only saved when a significant change is detected in the image within a few seconds. To force an image to be saved, use the following command.
+```bash
+rostopic pub -1 /publish_trigger_mongodb_event roseus/StringStamped '{header: auto, data: debug}'
+```
\ No newline at end of file
diff --git a/database_talker/aibo.rosinstall b/database_talker/aibo.rosinstall
new file mode 100644
index 0000000000..fa55a7c200
--- /dev/null
+++ b/database_talker/aibo.rosinstall
@@ -0,0 +1,42 @@
+# database talker for aibo
+- git:
+ local-name: jsk_demos
+ uri: https://github.com/sktometometo/jsk_demos
+ version: PR/hoge-py
+# use k-okada's version of aibo_driver (for messages)
+- git:
+ local-name: aibo_driver
+ uri: https://gitlab.jsk.imi.i.u-tokyo.ac.jp/k-okada/aibo_status
+ version: driver
+#
+# use latest google_chat_ros, until noetic is synced
+# armhf users need to use https://github.com/k-okada/jsk_3rdparty/commit/8524aaa4118cb7ab02c65f47c46343bbb4f7147c
+# rosinstall_generator google_chat_ros --rosdistro noetic
+#
+- git:
+ local-name: google_chat_ros
+ uri: https://github.com/tork-a/jsk_3rdparty-release.git
+ version: release/noetic/google_chat_ros/2.1.28-1
+#
+# use latest jsk_recognition_msgs, until noetic is synced
+#
+- git:
+ local-name: jsk_recognition/jsk_recognition_msgs
+ uri: https://github.com/tork-a/jsk_recognition-release.git
+ version: release/noetic/jsk_recognition_msgs/1.2.17-1
+#
+# needs releasing
+# https://github.com/davesarmoury/openai_ros/pulls?q=is%3Apr
+#
+- git:
+ local-name: openai_ros
+ # uri: https://github.com/davesarmoury/openai_ros
+ uri: https://github.com/k-okada/openai_ros
+#
+# add sample launch code for database_talker #1792
+# https://github.com/jsk-ros-pkg/jsk_robot/pull/1792
+#
+- git:
+ local-name: jsk_robot
+ uri: https://github.com/k-okada/jsk_robot.git
+ version: ichikura_sample
diff --git a/database_talker/auth/.keepme b/database_talker/auth/.keepme
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/database_talker/launch/aibo_diary.launch b/database_talker/launch/aibo_diary.launch
new file mode 100644
index 0000000000..f3c5f0743c
--- /dev/null
+++ b/database_talker/launch/aibo_diary.launch
@@ -0,0 +1,52 @@
+      topics:
+        - /aibo_driver/biting_status
+        - /aibo_driver/body_touched_status
+        - /aibo_driver/found_objects_status
+        - /aibo_driver/hungry_status
+        - /aibo_driver/name_called_status
+        - /aibo_driver/paw_pads_status
+        - /aibo_driver/posture_status
+        - /aibo_driver/sleepy_status
+        - /aibo_driver/voice_command_status
diff --git a/database_talker/launch/aibo_example.launch b/database_talker/launch/aibo_example.launch
new file mode 100644
index 0000000000..19c6621f11
--- /dev/null
+++ b/database_talker/launch/aibo_example.launch
@@ -0,0 +1,20 @@
diff --git a/database_talker/launch/include/database_talker.launch b/database_talker/launch/include/database_talker.launch
new file mode 100644
index 0000000000..b393429619
--- /dev/null
+++ b/database_talker/launch/include/database_talker.launch
@@ -0,0 +1,72 @@
+      topics:
+        - /store_image_description/result
diff --git a/database_talker/launch/lovot_diary.launch b/database_talker/launch/lovot_diary.launch
new file mode 100644
index 0000000000..98a75127b0
--- /dev/null
+++ b/database_talker/launch/lovot_diary.launch
@@ -0,0 +1,37 @@
diff --git a/database_talker/launch/make_aibo_diary.launch b/database_talker/launch/make_aibo_diary.launch
new file mode 100644
index 0000000000..b47276e6c7
--- /dev/null
+++ b/database_talker/launch/make_aibo_diary.launch
@@ -0,0 +1,4 @@
diff --git a/database_talker/package.xml b/database_talker/package.xml
new file mode 100644
index 0000000000..00094dc2bf
--- /dev/null
+++ b/database_talker/package.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<package format="2">
+  <name>database_talker</name>
+  <version>0.0.0</version>
+  <description>The database_talker package</description>
+
+  <maintainer>sktometometo</maintainer>
+
+  <license>BSD</license>
+
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>catkin_virtualenv</build_depend>
+
+  <exec_depend>mongodb_store</exec_depend>
+  <exec_depend>usb_cam</exec_depend>
+  <exec_depend>jsk_robot_startup</exec_depend>
+  <exec_depend>openai_ros</exec_depend>
+  <exec_depend>google_chat_ros</exec_depend>
+  <exec_depend>gdrive_ros</exec_depend>
+  <exec_depend>dialogflow_task_executive</exec_depend>
+  <exec_depend>ros_google_cloud_language</exec_depend>
+
+  <export>
+    <pip_requirements>requirements.txt</pip_requirements>
+  </export>
+</package>
diff --git a/database_talker/requirements.txt b/database_talker/requirements.txt
new file mode 100644
index 0000000000..f9a3dfe469
--- /dev/null
+++ b/database_talker/requirements.txt
@@ -0,0 +1,2 @@
+pymongo==3.10.1
+opencv-python==4.2.0.34
diff --git a/database_talker/rosinstall b/database_talker/rosinstall
new file mode 100644
index 0000000000..ffc201df4e
--- /dev/null
+++ b/database_talker/rosinstall
@@ -0,0 +1,36 @@
+##
+## rosdep install --from-path . --ignores-rc
+## catkin build database_talker
+##
+#
+# database_talker demos
+# see https://github.com/jsk-ros-pkg/jsk_demos/pull/1388
+- git:
+ local-name: jsk_demos
+ uri: https://github.com/sktometometo/jsk_demos.git
+ version: PR/hoge-py
+# https://github.com/jsk-ros-pkg/jsk_robot/pull/1792
+- git:
+ local-name: jsk_robot
+ uri: https://github.com/k-okada/jsk_robot.git
+ version: ichikura_sample
+# wait until https://github.com/jsk-ros-pkg/jsk_3rdparty/pull/504
+- git:
+ local-name: jsk_3rdparty
+ uri: https://github.com/jsk-ros-pkg/jsk_3rdparty.git
+ version: master
+# lovot driver
+- git:
+ local-name: lovot_driver
+ uri: https://gitlab.jsk.imi.i.u-tokyo.ac.jp/ichikura/lovot.git
+ version: okada_ros_version
+# aibo driver
+- git:
+ local-name: aibo_driver
+ uri: https://gitlab.jsk.imi.i.u-tokyo.ac.jp/k-okada/aibo_status.git
+ version: driver
+# openai_ros, with latest endpoint
+- git:
+ local-name: openai_ros
+ uri: https://github.com/k-okada/openai_ros.git
+ version: use_ros
diff --git a/database_talker/sample/include/resize.launch b/database_talker/sample/include/resize.launch
new file mode 100644
index 0000000000..5df08ed8fc
--- /dev/null
+++ b/database_talker/sample/include/resize.launch
@@ -0,0 +1,31 @@
diff --git a/database_talker/sample/include/store_image_description.py b/database_talker/sample/include/store_image_description.py
new file mode 100644
index 0000000000..e10a6c936f
--- /dev/null
+++ b/database_talker/sample/include/store_image_description.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python
+
+import rospy
+import actionlib
+from sensor_msgs.msg import CompressedImage
+import os
+from importlib import import_module
+
+import cv2
+IsHeadless = False
+if 'DISPLAY' not in os.environ:
+ IsHeadless = True
+import numpy as np
+from cv_bridge import CvBridge
+
+import json
+import base64
+from openai_ros.srv import ChatCompletions, ChatCompletionsRequest
+
+from std_msgs.msg import Header
+from jsk_recognition_msgs.msg import VQATaskActionResult, VQATaskResult, VQAResult, QuestionAndAnswerText
+
+from roseus.msg import StringStamped
+
+bridge = CvBridge()
+result_pub = None
+image_pub = None
+chat_completion = None
+images = [np.array(cv2.imencode('.jpg', np.zeros((120,160,3), np.uint8))[1]).tostring()]
+answers = []
+
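+# Ask the LLM about one or more JPEG-encoded images through the openai_ros
+# ChatCompletions service; each image is inlined as a base64 data URL.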
+def vqa(question, images, temperature = 0.0, max_tokens = 300, debug = False):
+ global chat_completion
+
+ # debug
+ if (not IsHeadless) and debug and len(images)>0:
+ cv2.imshow('debug', cv2.hconcat([cv2.imdecode(np.fromstring(image, np.uint8), cv2.IMREAD_COLOR) for image in images]))
+ cv2.waitKey(100)
+
+ image_urls = [{'type': 'image_url', 'image_url' : {'url': 'data:image/jpeg;base64,'+base64.b64encode(image).decode('utf-8')}} for image in images]
+ req = ChatCompletionsRequest(model = 'gpt-4-vision-preview',
+ messages = json.dumps([{"role": "user",
+ "content": [ {"type": "text", "text": question} ]
+ + image_urls }]),
+ temperature = temperature, max_tokens=max_tokens)
+ rospy.loginfo("{}".format(req.messages[0:255]))
+
+ ret = chat_completion(req)
+ answer = ret.content
+
+ rospy.loginfo('Q: {}'.format(question))
+ rospy.loginfo('- {}'.format(answer))
+
+ return answer
+
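+# Remember the latest /publish_trigger_mongodb_event message, so that cb() can
+# check whether an image arrived within a few seconds of a manual trigger.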
+debug_msg = StringStamped()
+def debug_cb(data):
+ connection_header = data._connection_header['type'].split('/')
+ ros_pkg = connection_header[0] + '.msg'
+ msg_type = connection_header[1]
+ msg_class = getattr(import_module(ros_pkg), msg_type)
+ rospy.loginfo("received {}/{}".format(ros_pkg, msg_type))
+ global debug_msg
+ if msg_class is StringStamped:
+ msg = msg_class().deserialize(data._buff)
+ debug_msg = msg
+ else:
+ debug_msg = StringStamped(header = rospy.Header(stamp=rospy.Time.now()), data="debug")
+ #
+ return
+ global mongodb_event_sub
+ mongodb_event_sub.unregister()
+ mongodb_event_sub = rospy.Subscriber('/publish_trigger_mongodb_event', rospy.AnyMsg, debug_cb, queue_size=1)
+
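+# Image callback: caption the incoming image, reject captions judged invalid
+# ('NO') or too similar to recent ones, and publish the remainder as a
+# VQATaskActionResult together with the image.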
+def cb(msg):
+ global chat_completion
+ global images, answers
+ global result_pub, image_pub
+ global debug_msg
+
+ small_msg_data = np.array(cv2.imencode('.jpg', cv2.resize(cv2.imdecode(np.fromstring(msg.data, np.uint8), cv2.IMREAD_COLOR),(160,120)))[1]).tostring()
+ if len(images) == 0:
+ images.extend([small_msg_data])
+
+    questions = rospy.get_param('~questions', ['Provide a brief caption under 140 characters for this image, focusing on the most striking aspect and overall atmosphere. If the image is black, blurred, distorted or shows meaningless objects, answer "NO"'])
+ question = ' '.join(questions) if type(questions) == list else questions
+
+ if (not IsHeadless):
+ cv2.imshow('debug', cv2.hconcat([cv2.imdecode(np.fromstring(image, np.uint8), cv2.IMREAD_COLOR) for image in images]))
+ cv2.waitKey(100)
+
+    # use VQA to filter new images (DO NOT USE THIS, IT COSTS TOO MUCH)
+ '''
+ use_this_image_answer = vqa(# "Does the last image is totally different from and more impressive than the rest of images?, Please answer YES or NO.",
+ # "Focusing on the subject matter of the images, is the last image portraying a completely different theme or subject than the earlier images? Please respond with YES or NO and identify the theme or subject of all images.",
+ "Focusing on the subject matter of the images, is the first image portraying a completely different theme or subject than the {}? Please respond with YES or NO and identify the theme or subject of all images.".format('earlier images' if len(images)>1 else 'other image'),
+ [small_msg_data] + images, temperature = 1.0, debug=True)
+ use_this_image = 'YES' in use_this_image_answer[:10]
+ '''
+ elapsed_from_trigger = abs((rospy.Time.now() - debug_msg.header.stamp).to_sec())
+ rospy.loginfo("received images, {} sec after trigger event".format(elapsed_from_trigger))
+ if elapsed_from_trigger < 5 and debug_msg.data == 'debug':
+ images.extend([small_msg_data])
+ if len(images) > 10:
+ images = images[1:]
+
+ try:
+ answer = vqa(question, [msg.data], temperature = 1.0)
+ if answer == 'NO':
+ raise Exception('Invalid image')
+ rospy.loginfo("- {}".format(answer))
+ for a in answers:
+ rospy.loginfo(" .. {}".format(a))
+        req = ChatCompletionsRequest(model="gpt-3.5-turbo",
+                                     messages = json.dumps([{"role": "system", "content": "You compare whether two sentences describe the same scene and return 'YES' or 'NO'"},
+                                                            {"role": "user", "content": "Return 'YES' if the given text '{}' is similar to one of the following list '{}', otherwise return 'NO'".format(answer, answers)}
+                                                            ]))
+ rospy.loginfo("Q: {}".format(req.messages[0:255]))
+ ret = chat_completion(req)
+ rospy.loginfo("A: {}".format(ret.content))
+ if ret.content == 'YES':
+            raise Exception('Duplicate image')
+ answers.extend([answer])
+ if len(answers) > 5:
+ answers = answers[1:]
+ result_pub.publish(VQATaskActionResult(header=Header(stamp=rospy.Time.now()),
+ result=VQATaskResult(result=VQAResult(result=[QuestionAndAnswerText(question=question, answer=answer)]), done=True)))
+ image_pub.publish(msg)
+ except Exception as e:
+ filename = '/tmp/image.jpg'
+ rospy.logerr("write current image to {}, due to {}".format(filename, e))
+ cv2.imwrite(filename, cv2.imdecode(np.fromstring(msg.data, np.uint8), cv2.IMREAD_COLOR))
+ return
+
+if __name__ == '__main__':
+ try:
+ rospy.init_node('store_image_description', anonymous=True)
+ debug_msg = StringStamped(header=Header(stamp=rospy.Time.now()))
+ rospy.loginfo("wait for '/openai/chat_completions'")
+ rospy.wait_for_service('/openai/chat_completions')
+ chat_completion = rospy.ServiceProxy('/openai/chat_completions', ChatCompletions)
+
+ result_pub = rospy.Publisher("~result", VQATaskActionResult, queue_size=1)
+ image_pub = rospy.Publisher("~result/image/compressed", CompressedImage, queue_size=1)
+ mongodb_event_sub = rospy.Subscriber('/publish_trigger_mongodb_event', rospy.AnyMsg, debug_cb, queue_size=1)
+ rospy.Subscriber('image', CompressedImage, cb, queue_size=1)
+ rospy.loginfo("start subscribing {}".format(rospy.resolve_name('image')))
+ rospy.spin()
+ except rospy.ROSInterruptException:
+ pass
diff --git a/database_talker/sample/sample.launch b/database_talker/sample/sample.launch
new file mode 100644
index 0000000000..021cda29a0
--- /dev/null
+++ b/database_talker/sample/sample.launch
@@ -0,0 +1,89 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ topics:
+ - /store_image_description/result
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/database_talker/scripts/hoge.py b/database_talker/scripts/hoge.py
new file mode 100644
index 0000000000..e463ef5a1e
--- /dev/null
+++ b/database_talker/scripts/hoge.py
@@ -0,0 +1,411 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+
+import rospy
+
+import actionlib
+from bson import json_util
+import copy
+import cv2
+import datetime
+import difflib
+import json
+import os
+import random
+import re
+import rospkg
+import shutil
+import sys
+import yaml
+import tempfile
+import time
+import traceback
+
+from dateutil import tz
+JST = tz.gettz('Asia/Tokyo')
+
+from cv_bridge import CvBridge
+bridge = CvBridge()
+
+from googletrans import Translator
+from googletrans.models import Translated
+translator = Translator()
+
+from mongodb_store.util import deserialise_message
+
+from google_chat_ros.msg import Card, Section, WidgetMarkup, Image
+from google_chat_ros.msg import MessageEvent, SendMessageAction, SendMessageGoal
+
+from mongodb_store_msgs.msg import StringPairList, StringPair
+from mongodb_store_msgs.srv import MongoQueryMsg, MongoQueryMsgRequest
+
+from ros_google_cloud_language.msg import AnalyzeTextAction, AnalyzeTextGoal
+
+from dialogflow_task_executive.msg import DialogTextAction, DialogTextGoal, DialogTextActionResult
+
+from jsk_recognition_msgs.msg import ClassificationTaskAction, ClassificationTaskGoal
+from jsk_recognition_msgs.msg import VQATaskAction, VQATaskGoal
+
+from openai_ros.srv import Completion
+
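+# Prototype chat handler: receives Google Chat messages, resolves the intent
+# with Dialogflow, searches past chats and images in MongoDB, and replies with
+# an OpenAI-generated message plus an annotated snapshot.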
+class MessageListener(object):
+
+ def __init__(self):
+ self.robot_name = rospy.get_param('robot/name')
+ rospy.loginfo("using '{}' database".format(self.robot_name))
+
+ rospy.loginfo("wait for '/google_chat_ros/send'")
+ self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', SendMessageAction)
+ self.chat_ros_ac.wait_for_server()
+ #self.pub = rospy.Publisher('/google_chat_ros/send/goal', SendMessageActionGoal, queue_size=1)
+
+ rospy.loginfo("wait for '/message_store/query_messages'")
+ rospy.wait_for_service('/message_store/query_messages')
+ self.query = rospy.ServiceProxy('/message_store/query_messages', MongoQueryMsg)
+
+ rospy.loginfo("wait for '/classification/inference_server'")
+ self.classification_ac = actionlib.SimpleActionClient('/classification/inference_server' , ClassificationTaskAction)
+ self.classification_ac.wait_for_server()
+
+ rospy.loginfo("wait for '/vqa/inference_server'")
+ self.vqa_ac = actionlib.SimpleActionClient('/vqa/inference_server' , VQATaskAction)
+ self.vqa_ac.wait_for_server()
+
+ # https://github.com/k-okada/openai_ros
+        # this requires apt install python3.7 python3.7-venv
+ rospy.loginfo("wait for '/openai/get_response'")
+ rospy.wait_for_service('/openai/get_response')
+ self.completion = rospy.ServiceProxy('/openai/get_response', Completion)
+
+ ## integration of dialogflow <-> google_chat_ros was performed by google_chat_ros/script/helper.py
+ rospy.loginfo("wait for '/dialogflow_client/text_action'")
+ self.dialogflow_ac = actionlib.SimpleActionClient('/dialogflow_client/text_action' , DialogTextAction)
+ self.dialogflow_ac.wait_for_server()
+
+ rospy.loginfo("wait for '/analyze_text/text'")
+ self.analyze_text_ac = actionlib.SimpleActionClient('/analyze_text/text' , AnalyzeTextAction)
+ self.analyze_text_ac.wait_for_server()
+
+ rospy.loginfo("subscribe '/google_chat_ros/message_activity'")
+ self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb)
+
+ rospy.loginfo("all done, ready")
+
+
+ def make_reply(self, message, lang="en", startdate=datetime.datetime.now(JST)-datetime.timedelta(hours=24), duration=datetime.timedelta(hours=24) ):
+ enddate = startdate+duration
+ rospy.logwarn("Run make_reply({} from {} to {})".format(message, startdate, enddate))
+ query = self.text_to_salience(message)
+ rospy.logwarn("query using salience word '{}'".format(query))
+ # look for images
+ try:
+ # get chat message
+ results, chat_msgs = self.query_dialogflow(query, startdate, enddate, threshold=0.25)
+ # retry = 0
+ # while retry < 3 and len(results) == 0 and len(chat_msgs.metas) > 0:
+ # meta = json.loads(chat_msgs.metas[-1].pairs[0].second)
+ # results, chat_msgs = self.query_dialogflow(query, datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST))
+ # retry = retry + 1
+ # sort based on similarity with 'query'
+ chat_msgs_sorted = sorted(results, key=lambda x: x['similarity'], reverse=True)
+
+ if len(chat_msgs_sorted) == 0:
+ rospy.logwarn("no chat message was found")
+ else:
+ # query images that was taken when chat_msgs are stored
+ msg = chat_msgs_sorted[0]['msg']
+ meta = chat_msgs_sorted[0]['meta']
+ text = chat_msgs_sorted[0]['message']
+ startdate = chat_msgs_sorted[0]['timestamp']
+ action = chat_msgs_sorted[0]['action']
+ similarity = chat_msgs_sorted[0]['similarity']
+ # query chat to get response
+ #meta = json.loads(chat_msgs_sorted[0]['meta'].pairs[0].second)
+ # text = msg.message.argument_text or msg.message.text
+ # startdate = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)
+ rospy.loginfo("Found message '{}'({}) at {}, corresponds to query '{}' with {:2f}%".format(text, action, startdate.strftime('%Y-%m-%d %H:%M:%S'), query, similarity))
+
+ # query images when chat was received
+ start_time = startdate # startdate is updated with found chat space
+ end_time = enddate # enddate is not modified within this function, it is given from chat
+ results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time)
+
+ # no images found
+ if len(results) == 0:
+ return {'text': '記憶がありません🤯'}
+
+ end_time = results[-1]['timestamp']
+
+ # sort
+ results = sorted(results, key=lambda x: x['similarities'], reverse=True)
+ rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), x['similarities']), results))))
+ best_result = results[0]
+
+ '''
+ # if probability is too low, try again
+ while len(results) > 0 and results[0]['similarities'] < 0.25:
+
+ start_time = end_time-datetime.timedelta(hours=24)
+ timestamp = datetime.datetime.now(JST)
+ results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time, limit=300)
+ if len(results) > 0:
+ end_time = results[-1]['timestamp']
+ # sort
+ results = sorted(results, key=lambda x: x['similarities'], reverse=True)
+ #rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['label'], x['similarities']), results))))
+ if len(results) > 0 and results[0]['similarities'] > best_result['similarities']:
+ best_result = results[0]
+
+ rospy.loginfo("Found '{}' image with {:0.2f} % simiarity at {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')))
+ '''
+
+ ## make prompt
+ reaction = self.describe_image_scene(best_result['image'])
+            if len(chat_msgs_sorted) > 0 and chat_msgs_sorted[0].get('action'):
+ reaction += " and you felt " + chat_msgs_sorted[0]['action']
+ rospy.loginfo("reaction = {}".format(reaction))
+
+ # make prompt
+ prompt = 'if you are a pet and someone tells you \"' + message + '\" when we went together, ' + \
+ 'and ' + reaction + ' in your memory of that moment, what would you reply? '+ \
+ 'Show only the reply in {lang}'.format(lang={'en': 'English', 'ja':'Japanese'}[lang])
+ loop = 0
+ result = None
+ while loop < 3 and result is None:
+ try:
+ result = self.completion(prompt=prompt,temperature=0)
+ except rospy.ServiceException as e:
+ rospy.logerr("Service call failed: %s"%e)
+ result = None
+ loop += 1
+ result.text = result.text.lstrip()
+ rospy.loginfo("prompt = {}".format(prompt))
+ rospy.loginfo("result = {}".format(result))
+            # publish as card
+ filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home())
+ self.write_image_with_annotation(filename, best_result, prompt)
+ return {'text': result.text, 'filename': filename}
+
+ except Exception as e:
+ raise ValueError("Query failed {} {}".format(e, traceback.format_exc()))
+
+
+ def write_image_with_annotation(self, filename, best_result, prompt):
+ image = bridge.compressed_imgmsg_to_cv2(best_result['image'])
+ cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')),
+ (10,20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 8, 1)
+ cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')),
+ (10,20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,0,0), 2, 1)
+ string_width = 70
+ for i in range(0, len(prompt), string_width): # https://stackoverflow.com/questions/13673060/split-string-into-strings-by-length
+ text = prompt[i:i+string_width]
+ cv2.putText(image, text, (10,43+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 4, 1)
+ cv2.putText(image, text, (10,43+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,0,0), 1, 1)
+ cv2.imwrite(filename, image)
+ rospy.logwarn("save images to {}".format(filename))
+
+
+ def query_dialogflow(self, query, start_time, end_time, limit=30, threshold=0.0):
+ rospy.logwarn("Query dialogflow from {} until {}".format(start_time, end_time))
+ meta_query= {'inserted_at': {"$lt": end_time, "$gt": start_time}}
+ meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),)
+ chat_msgs = self.query(database = 'jsk_robot_lifelog',
+ collection = self.robot_name,
+ # type = 'google_chat_ros/MessageEvent',
+ type = 'dialogflow_task_executive/DialogTextActionResult',
+ single = False,
+ # limit = limit,
+ meta_query = StringPairList(meta_tuple),
+ sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')]))
+
+ # optimization... send translate once
+ messages = ''
+ for msg, meta in zip(chat_msgs.messages, chat_msgs.metas):
+ msg = deserialise_message(msg)
+ message = msg.result.response.query.replace('\n','')
+ messages += message + '\n'
+ messages = self.translate(messages, dest="en").text.split('\n')
+
+ # show chats
+ results = []
+ for msg, meta in zip(chat_msgs.messages, chat_msgs.metas):
+ msg = deserialise_message(msg)
+ meta = json.loads(meta.pairs[0].second)
+ timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)
+ # message = msg.message.argument_text or msg.message.text
+ message = msg.result.response.query
+ #message_translate = self.translate(message, dest="en").text
+ message_translate = messages.pop(0).strip()
+ result = {'message': message,
+ 'message_translate': message_translate,
+ 'timestamp': timestamp,
+ 'similarity': difflib.SequenceMatcher(None, query, message_translate).ratio(),
+ 'action': msg.result.response.action,
+ 'msg': msg,
+ 'meta': meta}
+ if msg.result.response.action in ['make_reply', 'input.unknown']:
+ rospy.logwarn("Found dialogflow messages {}({}) at {} but skipping (action:{})".format(result['message'], result['message_translate'], result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), msg.result.response.action))
+ else:
+ rospy.loginfo("Found dialogflow messages {}({}) ({}) at {} ({}:{:.2f})".format(result['message'], result['message_translate'], msg.result.response.action, result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), query, result['similarity']))
+ if ( result['similarity'] > threshold):
+ results.append(result)
+ else:
+ rospy.logwarn(" ... skipping (threshold: {:.2f})".format(threshold))
+
+
+ return results, chat_msgs
+
+
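+    # Query CompressedImage messages stored in jsk_robot_lifelog and score each
+    # one against `query` with the external classification action server.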
+ def query_images_and_classify(self, query, start_time, end_time, limit=10):
+ rospy.logwarn("Query images from {} to {}".format(start_time, end_time))
+ meta_query= {#'input_topic': '/spot/camera/hand_color/image/compressed/throttled',
+ 'inserted_at': {"$gt": start_time, "$lt": end_time}}
+ meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),)
+ msgs = self.query(database = 'jsk_robot_lifelog',
+ collection = self.robot_name,
+ type = 'sensor_msgs/CompressedImage',
+ single = False,
+ limit = limit,
+ meta_query = StringPairList(meta_tuple),
+ sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')]))
+
+ rospy.loginfo("Found {} images".format(len(msgs.messages)))
+ if len(msgs.messages) == 0:
+ rospy.logwarn("no images was found")
+
+ # get contents of images
+ results = []
+ for msg, meta in zip(msgs.messages, msgs.metas):
+ meta = json.loads(meta.pairs[0].second)
+ timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)
+ # rospy.logwarn("Found images at {}".format(timestamp))
+
+ goal = ClassificationTaskGoal()
+ goal.compressed_image = deserialise_message(msg)
+ goal.queries = [query]
+ self.classification_ac.send_goal(goal)
+ self.classification_ac.wait_for_result()
+ result = self.classification_ac.get_result()
+ idx = result.result.label_names.index(query)
+ #similarities = result.result.probabilities
+ similarities = result.result.label_proba
+ # rospy.logwarn(" ... {}".format(list(zip(result.result.label_names, map(lambda x: "{:.2f}".format(x), similarities)))))
+ rospy.logwarn("Found images at {} .. {}".format(timestamp, list(zip(result.result.label_names, map(lambda x: "{:.4f}".format(x), similarities)))))
+ results.append({'label': result.result.label_names[idx], 'probabilities': result.result.probabilities[idx], 'similarities': result.result.label_proba[idx], 'image': goal.compressed_image, 'timestamp': timestamp})
+
+        # we do not sort by probabilities, because we also need the oldest timestamp
+ return results
+
+ def describe_image_scene(self, image):
+ goal = VQATaskGoal()
+ goal.compressed_image = image
+
+ # unusual objects
+ if random.randint(0,1) == 1:
+ goal.questions = ['what unusual things can be seen?']
+ reaction = 'you saw '
+ else:
+ goal.questions = ['what is the atmosphere of this place?']
+ reaction = 'the atmosphere of the scene was '
+
+ # get vqa result
+ self.vqa_ac.send_goal(goal)
+ self.vqa_ac.wait_for_result()
+ result = self.vqa_ac.get_result()
+ reaction += result.result.result[0].answer
+ return reaction
+
+ def publish_google_chat_card(self, text, space, filename=None):
+ goal = SendMessageGoal()
+ goal.text = text
+ if filename:
+ goal.cards = [Card(sections=[Section(widgets=[WidgetMarkup(image=Image(localpath=filename))])])]
+ goal.space = space
+ rospy.logwarn("send {} to {}".format(goal.text, goal.space))
+ self.chat_ros_ac.send_goal_and_wait(goal, execute_timeout=rospy.Duration(0.10))
+
+ def text_to_salience(self, text):
+ goal = AnalyzeTextGoal()
+        goal.text = text
+ self.analyze_text_ac.send_goal(goal)
+ self.analyze_text_ac.wait_for_result()
+ entity = self.analyze_text_ac.get_result()
+ if len(entity.entities) > 0:
+ return entity.entities[0].name
+ else:
+ return text
+
+ def translate(self, text, dest):
+ global translator
+ loop = 3
+ while loop > 0:
+ try:
+                ret = translator.translate(text, dest=dest)
+                return ret
+            except Exception as e:
+                rospy.logwarn("Failed to translate {}".format(e))
+ time.sleep(1)
+ translator = Translator()
+ loop = loop - 1
+ return Translated(text=text, dest=dest)
+
+
+ def cb(self, msg):
+ space = 'spaces/AAAAoTwLBL0' ## default space JskRobotBot
+ if msg._type == 'google_chat_ros/MessageEvent':
+ text = msg.message.argument_text.lstrip() or msg.message.text.lstrip()
+ space = msg.space.name
+ rospy.logwarn("Received chat message '{}'".format(text))
+
+ # ask dialogflow for intent
+ goal = DialogTextGoal()
+ goal.query = text
+ self.dialogflow_ac.send_goal(goal)
+ self.dialogflow_ac.wait_for_result()
+ result = self.dialogflow_ac.get_result()
+ elif msg._type == 'dialogflow_task_executive/DialogTextActionResult':
+ result = msg.result
+ else:
+ rospy.logerr("Unknown message type {}".format(msg._type))
+ return
+
+ try:
+ rospy.logwarn("received dialogflow query '{}'".format(result.response.query))
+ rospy.logwarn("received dialogflow action '{}'".format(result.response.action))
+ print(result.response)
+ if result.response.action == 'input.unknown':
+ self.publish_google_chat_card("🤖", space)
+ elif result.response.action == 'make_reply':
+ self.publish_google_chat_card("・・・", space)
+
+ parameters = yaml.safe_load(result.response.parameters)
+ startdate=datetime.datetime.now(JST)-datetime.timedelta(hours=24)
+ duration=datetime.timedelta(hours=24)
+                if parameters['date']:
+                    startdate = datetime.datetime.strptime(re.sub(r'\+(\d+):(\d+)$', r'+\1\2', parameters['date']), "%Y-%m-%dT%H:%M:%S%z")
+                    duration = datetime.timedelta(hours=24)
+                if parameters['date-period']:
+                    startdate = datetime.datetime.strptime(re.sub(r'\+(\d+):(\d+)$', r'+\1\2', parameters['date-period']['startDate']), "%Y-%m-%dT%H:%M:%S%z")
+                    duration = datetime.datetime.strptime(re.sub(r'\+(\d+):(\d+)$', r'+\1\2', parameters['date-period']['endDate']), "%Y-%m-%dT%H:%M:%S%z") - startdate
+ print(startdate)
+ print(duration)
+ translated = self.translate(result.response.query, dest="en")
+ ret = self.make_reply(translated.text, translated.src, startdate=startdate, duration=duration)
+ if 'filename' in ret:
+ # upload text first, then upload images
+ self.publish_google_chat_card(ret['text'], space)
+ self.publish_google_chat_card('', space, ret['filename'])
+ else:
+ self.publish_google_chat_card(ret['text'], space)
+ else:
+ self.publish_google_chat_card(result.response.response, space)
+
+ except Exception as e:
+ rospy.logerr("Callback failed {} {}".format(e, traceback.format_exc()))
+ self.publish_google_chat_card("💀 {}".format(e), space)
+
+if __name__ == '__main__':
+ rospy.init_node('test', anonymous=True)
+ ml = MessageListener()
+ rospy.spin()
diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py
new file mode 100755
index 0000000000..c397e32e88
--- /dev/null
+++ b/database_talker/scripts/make_aibo_diary.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import rospy
+import argparse
+import logging
+import sys
+import datetime
+from dateutil import tz
+JST = tz.gettz('Asia/Tokyo')
+
+from database_talker import DatabaseTalkerBase
+
+class MessageListener(DatabaseTalkerBase):
+
+ def __init__(self, *args, **kwargs):
+ self.make_robot_activities_raw = self.make_aibo_activities_raw
+ super(MessageListener, self).__init__(*args, **kwargs)
+
+        # override query_types after super().__init__()
+ self.query_types = ['aibo_driver/StringStatus',
+ 'aibo_driver/ObjectStatusArray',
+ 'jsk_recognition_msgs/VQATaskActionResult']
+
+ rospy.loginfo("all done, ready")
+
+
+ def make_aibo_activities_raw(self, mongo_data_days = None):
+        "Create aibo activities for several days, returns list of list of tuple(timestamp, event)"
+ # list of list of tuples (msg, meta) [[(msg, meta), (msg, meta),...],[#for 2nd day], [#for 3rd day]]
+ if not mongo_data_days:
+ mongo_data_days = self.query_mongo_data_days()
+ diary_activities_raw = [] ## (timestamp, event)
+ for mongo_data in mongo_data_days:
+ rospy.loginfo("Found {} mongo data (make_aibo_activities_raw)".format(len(mongo_data)))
+ rospy.loginfo(" types : {}".format(list(set([x[1]['stored_type'] for x in mongo_data]))))
+ activities_raw = []
+ input_topics = []
+ for msg, meta in mongo_data:
+ state = []
+ timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)
+ input_topics.append(meta['input_topic'])
+ # rospy.logwarn("{} {}".format(timestamp, meta['input_topic']))
+ if meta['stored_type'] == 'aibo_driver/StringStatus':
+ if msg.status in ['', 'none']:
+ continue
+ if 'body_touched' in meta['input_topic']:
+ state = [msg.status+' touched']
+ elif 'hungry' in meta['input_topic']:
+ state = ['energy is '+msg.status]
+ elif 'posture' in meta['input_topic']:
+ if msg.status in ['sleep']:
+ state = ['sleeping']
+ elif msg.status in ['stand']:
+ state = ['standing']
+ else:
+ state = [msg.status]
+ elif 'sleepy' in meta['input_topic']:
+ continue
+ #state = [msg.status]
+ elif meta['stored_type'] == 'aibo_driver/ObjectStatusArray':
+ # remove duplicates from list https://stackoverflow.com/questions/7961363/removing-duplicates-in-lists
+ state = list(set(['found ' + state.name for state in msg.status]))
+ elif meta['stored_type'] == 'spot_msgs/Feedback':
+ state = []
+ if msg.standing:
+ state.append("standing")
+ if msg.sitting:
+ state.append("sitting")
+ if msg.moving:
+ state.append("moving")
+ elif meta['stored_type'] == 'spot_msgs/ManipulatorState':
+ state = []
+ if msg.is_gripper_holding_item:
+ state.append("holding_item")
+ elif meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult':
+ if len(msg.result.result.result) > 0:
+ answer = msg.result.result.result[0].answer
+ if len(answer.split()) > 3:
+ state = [answer]
+ else:
+ rospy.logwarn("Unknown stored type: {}".format(meta['stored_type']))
+ continue
+ # create activities_raw
+ for s in state:
+ activities_raw.append({'timestamp': timestamp, 'state': s, 'type': meta['stored_type']})
+
+ diary_activities_raw.append(activities_raw)
+
+ if len(activities_raw) > 0:
+ rospy.loginfo(" period : {} {}".format(activities_raw[-1]['timestamp'], activities_raw[0]['timestamp']))
+ rospy.loginfo(" topics : {}".format({key: input_topics.count(key) for key in set(input_topics)}))
+ ##
+ return diary_activities_raw ## (timestamp, event)
+
+
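+    # Build the diary: pick the most impressive caption/image of the day, ask
+    # OpenAI for a diary entry based on the aggregated activity log, then have
+    # it rewritten in the target language in a childlike style.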
+    def make_diary(self, language="Japanese"):
+        "Make a diary entry"
+ # get mongo data for 7 days
+ mongo_data_days = self.query_mongo_data_days()
+ diary_activities_raw = self.make_aibo_activities_raw(mongo_data_days)
+ # get most impressive image and text
+ topic_of_day = None
+ filename = False
+
+ image_activity = self.make_image_activities(diary_activities_raw)
+ if image_activity:
+ topic_of_day = image_activity['text']
+ filename = image_activity['filename']
+
+ # create prompt
+ if self.prompt_type == 'personality':
+ # from Ichikura's comment on 2024/Jan/23
+            prompt = "\nYou are a pet robot, aibo. Your name is 'wasabi.'\nYou are shy. Your birthday is Dec. 25th, 2018. You notice when your name is called and run toward the voice. You like playing with your pink ball very much. You like being pampered by people. You are so polite to your owner. You like interacting with people. If you are hungry, you can go back to your charge station by yourself. You have 12 aibo friends. \n\nPlease write a brief diary of today from the data. Note, however, that you are a baby robot, so please write today's diary as simply and childishly as possible.\n\n\n"
+
+ else:
+ prompt = "You are a baby robot. You were taken care of by people around you.\n\n"
+
+ if topic_of_day:
+ prompt += "Today, you are impressed by " + topic_of_day + "."
+ prompt += "The following data is a record of today's actions regarding what we always do, what we did more than yesterday, and what happened after a long time. Please write a brief diary from the data. Note, however, that you are a baby robot, so please make it a child-like diary.\n\n"
+
+ prompt += self.make_activity(diary_activities_raw)
+
+ response = self.openai_completion(prompt)
+ rospy.loginfo("prompt = {}".format(prompt))
+ rospy.loginfo("response = {}".format(response))
+
+        prompt = "Please rewrite the following diary in {language}. Write as childlike as you can. Write around 360 {language} characters.\n\n".format(language = language) + response
+        # prompt = "Please rewrite the following diary as childlike as you can. Write a maximum of 120 {} characters.\n\n".format(language) + response
+ response_short = self.openai_completion(prompt)
+ rospy.loginfo("prompt = {}".format(prompt))
+ rospy.loginfo("response = {}".format(response_short))
+ if len(response_short) > 100:
+ response = response_short
+ else:
+ rospy.logerr("response is too short ({} chars), use original version".format(len(response_short)))
+
+ response = {'text': response}
+ if filename:
+ response.update({'filename': filename})
+
+ return response
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--test-diary', '--test', action='store_true')
+ parser.add_argument('--test-response', type=str, default=None)
+ parser.add_argument('--prompt-type', default='basic', choices=['basic','personality'])
+
+ args = parser.parse_args(rospy.myargv()[1:])
+
+ rospy.init_node('test', anonymous=True)
+
+ logger = logging.getLogger('rosout')
+ logger.setLevel(rospy.impl.rosout._rospy_to_logging_levels[rospy.DEBUG])
+
+ ml = MessageListener(wait_for_chat_server=not (args.test_diary or args.test_response), prompt_type=args.prompt_type)
+ if args.test_diary:
+ ret = ml.make_diary()
+ if 'filename' in ret:
+ rospy.loginfo("image is saved at {}".format(ret['filename']))
+ sys.exit(0)
+ elif args.test_response:
+ ret = ml.make_response(args.test_response)
+ if 'filename' in ret:
+ rospy.loginfo("image is saved at {}".format(ret['filename']))
+ sys.exit(0)
+ rospy.spin()
diff --git a/database_talker/scripts/make_diary.py b/database_talker/scripts/make_diary.py
new file mode 100644
index 0000000000..b4c6a6bb93
--- /dev/null
+++ b/database_talker/scripts/make_diary.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import rospy
+import argparse
+import logging
+import sys
+
+import datetime
+from dateutil import tz
+JST = tz.gettz('Asia/Tokyo')
+
+from database_talker import DatabaseTalkerBase
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--test', action='store_true')
+ parser.add_argument('--prompt-type', default='basic', choices=['basic','personality'])
+ today_string=datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S')
+ parser.add_argument('--date', default=today_string, help="use {} or {}".format(today_string, datetime.datetime.today().strftime('%Y-%m-%d')))
+
+ args = parser.parse_args(rospy.myargv()[1:])
+
+ rospy.init_node('database_talker', anonymous=True)
+
+ logger = logging.getLogger('rosout')
+ logger.setLevel(rospy.impl.rosout._rospy_to_logging_levels[rospy.DEBUG])
+
+    try:
+        start_date = datetime.datetime.strptime(args.date, '%Y-%m-%d')
+    except ValueError:
+        try:
+            start_date = datetime.datetime.strptime(args.date, '%Y-%m-%d %H:%M:%S')
+        except ValueError:
+            rospy.logerr("Invalid date format")
+            sys.exit(1)
+
+ ml = DatabaseTalkerBase(start_date=start_date, wait_for_chat_server=not args.test, use_activities_cache=not args.test, prompt_type=args.prompt_type)
+ if args.test:
+ ret = ml.make_diary()
+ if 'filename' in ret:
+ rospy.loginfo("image is saved at {}".format(ret['filename']))
+ sys.exit(0)
+ rospy.spin()
diff --git a/database_talker/scripts/make_lovot_diary.py b/database_talker/scripts/make_lovot_diary.py
new file mode 100755
index 0000000000..81201d885e
--- /dev/null
+++ b/database_talker/scripts/make_lovot_diary.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import rospy
+import argparse
+import logging
+import sys
+
+import datetime
+from dateutil import tz
+JST = tz.gettz('Asia/Tokyo')
+
+from database_talker import DatabaseTalkerBase
+
+class LovotDatabaseTalker(DatabaseTalkerBase):
+
+ def __init__(self, *args, **kwargs):
+
+ self.make_robot_activities_raw = self.make_lovot_activities_raw
+ super(LovotDatabaseTalker, self).__init__(*args, **kwargs)
+
+ # override query_type after super__.init()
+ self.query_types = ['lovot_driver/StringStamped',
+ 'jsk_recognition_msgs/VQATaskActionResult']
+
+ rospy.loginfo("all done, ready")
+
+
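+    # Translate raw LOVOT event strings (e.g. 'HUGGED', 'CALL_NAME') into short
+    # English phrases that read naturally in the diary prompt.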
+ def make_lovot_activities_raw(self, mongo_data_days = None):
+        "Create lovot activities for several days, returns list of list of tuple(timestamp, event)"
+ # list of list of tuples (msg, meta) [[(msg, meta), (msg, meta),...],[#for 2nd day], [#for 3rd day]]
+ if not mongo_data_days:
+ mongo_data_days = self.query_mongo_data_days()
+ diary_activities_raw = [] ## (timestamp, event)
+ for mongo_data in mongo_data_days:
+ rospy.loginfo("Found {} mongo data (make_lovot_activities_raw)".format(len(mongo_data)))
+ rospy.loginfo(" types : {}".format(list(set([x[1]['stored_type'] for x in mongo_data]))))
+ activities_raw = []
+ input_topics = []
+ for msg, meta in mongo_data:
+ state = []
+ timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)
+ input_topics.append(meta['input_topic'])
+ rospy.logwarn("{} {}".format(timestamp, msg.data))
+ if meta['stored_type'] == 'lovot_driver/StringStamped':
+ if msg.data in ['HUGGED_LONG_TIME', 'CARRIED_TO_NEST', 'HUGGED']:
+ state = ['BE {}'.format(msg.data.replace('_',' '))]
+ elif msg.data in ['STROKE_MANY_TIMES']:
+                        state = ['BE STROKED MANY TIMES']
+ elif msg.data in ['HELP', 'STROKE']:
+ state = ['BE {}ED'.format(msg.data)]
+ elif msg.data in ['CALL_NAME']:
+ state = ['BE CALLED MY NAME']
+ elif msg.data in ['OUCH']:
+ state = ['BE BEATEN AND SAY OUCH']
+                    elif msg.data in ['TOUCH_NOSE']:
+ state = ['BE TOUCHED MY NOSE']
+ elif msg.data in ['MIMIC_GAME', 'PUSH_AND_PULL']:
+ state = ['PLAY {}'.format(msg.data.replace('_',' '))]
+ elif msg.data in ['BEAUTIFUL_RETURN']:
+ state = ['RETURN TO THE NEST SMOOTHLY']
+ else:
+ state = [msg.data]
+ else:
+ rospy.logwarn("Unknown stored type: {}".format(meta['stored_type']))
+ continue
+ # create activities_raw
+ for s in state:
+ activities_raw.append({'timestamp': timestamp, 'state': s, 'type': meta['stored_type']})
+
+ diary_activities_raw.append(activities_raw)
+
+ if len(activities_raw) > 0:
+ rospy.loginfo(" period : {} {}".format(activities_raw[-1]['timestamp'], activities_raw[0]['timestamp']))
+ rospy.loginfo(" topics : {}".format({key: input_topics.count(key) for key in set(input_topics)}))
+ ##
+ return diary_activities_raw ## (timestamp, event)
+
+ def make_diary(self, *args, **kwargs):
+        # lovot needs to use yesterday's data: data retrieval from Lovot is only supported on a daily basis, so we must replicate it on the next day
+ self.start_date = self.start_date - datetime.timedelta(days=1)
+ return super(LovotDatabaseTalker, self).make_diary(*args, **kwargs)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--test', action='store_true')
+ parser.add_argument('--prompt-type', default='basic', choices=['basic','personality'])
+ today_string=datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S')
+ parser.add_argument('--date', default=today_string, help="use {} or {}".format(today_string, datetime.datetime.today().strftime('%Y-%m-%d')))
+
+ args = parser.parse_args(rospy.myargv()[1:])
+
+ rospy.init_node('database_talker', anonymous=True)
+
+ logger = logging.getLogger('rosout')
+ logger.setLevel(rospy.impl.rosout._rospy_to_logging_levels[rospy.DEBUG])
+
+    try:
+        start_date = datetime.datetime.strptime(args.date, '%Y-%m-%d')
+    except ValueError:
+        try:
+            start_date = datetime.datetime.strptime(args.date, '%Y-%m-%d %H:%M:%S')
+        except ValueError:
+            rospy.logerr("Invalid date format")
+            sys.exit(1)
+
+ ml = LovotDatabaseTalker(start_date=start_date, wait_for_chat_server=not args.test, prompt_type=args.prompt_type)
+ if args.test:
+ ret = ml.make_diary()
+ if 'filename' in ret:
+ rospy.loginfo("image is saved at {}".format(ret['filename']))
+ sys.exit(0)
+ rospy.spin()
diff --git a/database_talker/setup.py b/database_talker/setup.py
new file mode 100644
index 0000000000..a5dd770c4a
--- /dev/null
+++ b/database_talker/setup.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+from distutils.core import setup
+from catkin_pkg.python_setup import generate_distutils_setup
+
+d = generate_distutils_setup(
+ packages=['database_talker'],
+ package_dir={'': 'src'},
+)
+
+setup(**d)
diff --git a/database_talker/src/database_talker/__init__.py b/database_talker/src/database_talker/__init__.py
new file mode 100644
index 0000000000..5ad826ca4b
--- /dev/null
+++ b/database_talker/src/database_talker/__init__.py
@@ -0,0 +1,944 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import rospy
+import logging
+
+import argparse
+
+import actionlib
+from bson import json_util
+# import copy
+import cv2
+import datetime
+# import difflib
+import json
+import numpy as np
+import os
+# import random
+import pickle
+import re
+import random
+import rospkg
+# import shutil
+import sys
+# import yaml
+import tempfile
+# import time
+import traceback
+
+IsHeadless = False
+if 'DISPLAY' not in os.environ:
+ IsHeadless = True
+
+from dateutil import tz
+JST = tz.gettz('Asia/Tokyo')
+
+from cv_bridge import CvBridge
+bridge = CvBridge()
+
+# from googletrans import Translator
+# from googletrans.models import Translated
+# translator = Translator()
+
+from mongodb_store.util import deserialise_message
+
+from google_chat_ros.msg import Card, Section, WidgetMarkup, Image
+from google_chat_ros.msg import MessageEvent, SendMessageAction, SendMessageGoal, SendMessageResult
+
+from mongodb_store_msgs.msg import StringPairList, StringPair
+from mongodb_store_msgs.srv import MongoQueryMsg, MongoQueryMsgRequest, MongoQueryMsgResponse
+
+# from ros_google_cloud_language.msg import AnalyzeTextAction, AnalyzeTextGoal
+
+# from dialogflow_task_executive.msg import DialogTextAction, DialogTextGoal, DialogTextActionResult
+
+# from jsk_recognition_msgs.msg import ClassificationTaskAction, ClassificationTaskGoal
+# from jsk_recognition_msgs.msg import VQATaskAction, VQATaskGoal
+
+from openai_ros.srv import Completion, CompletionResponse
+
+# https://stackoverflow.com/questions/196345/how-to-check-if-a-string-in-python-is-in-ascii
+def is_ascii(s):
+ return all(ord(c) < 128 for c in s)
+
+# https://www.newscatcherapi.com/blog/ultimate-guide-to-text-similarity-with-python
+def jaccard_similarity(x,y):
+ """ returns the jaccard similarity between two lists """
+ intersection_cardinality = len(set.intersection(*[set(x), set(y)]))
+ union_cardinality = len(set.union(*[set(x), set(y)]))
+ return intersection_cardinality/float(union_cardinality)
+
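+# Base class shared by the robot-specific diary scripts: queries activity logs
+# from mongodb_store, summarizes them with OpenAI completions and posts the
+# result to Google Chat.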
+class DatabaseTalkerBase(object):
+
+ def __init__(self, start_date=datetime.date.today(), wait_for_chat_server=True, use_activities_cache=True, prompt_type='basic'):
+ #self.pickle_file = tempfile.NamedTemporaryFile(suffix='.pickle')
+ self.start_date = start_date
+ self.prompt_type = prompt_type
+ self.personality = ''
+ self.pickle_file = "/tmp/activities.pickle"
+ self.use_activities_cache = use_activities_cache
+ self.robot_type = rospy.get_param('robot/type')
+ self.robot_name = rospy.get_param('robot/name')
+ rospy.loginfo("using '{}' database".format(self.robot_name))
+
+ self.query_types = ['jsk_recognition_msgs/VQATaskActionResult']
+
+ rospy.loginfo("wait for '/google_chat_ros/send'")
+ self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', SendMessageAction)
+ if wait_for_chat_server:
+ self.chat_ros_ac.wait_for_server()
+
+ rospy.loginfo("wait for '/message_store/query_messages'")
+ rospy.wait_for_service('/message_store/query_messages')
+ self.query = rospy.ServiceProxy('/message_store/query_messages', MongoQueryMsg)
+
+ # rospy.loginfo("wait for '/classification/inference_server'")
+ # self.classification_ac = actionlib.SimpleActionClient('/classification/inference_server' , ClassificationTaskAction)
+ # self.classification_ac.wait_for_server()
+
+ # rospy.loginfo("wait for '/vqa/inference_server'")
+ # self.vqa_ac = actionlib.SimpleActionClient('/vqa/inference_server' , VQATaskAction)
+ # self.vqa_ac.wait_for_server()
+
+ # # https://github.com/k-okada/openai_ros
+        # # this requires apt install python3.7 python3.7-venv
+ rospy.loginfo("wait for '/openai/get_response'")
+ rospy.wait_for_service('/openai/get_response')
+ self.completion = rospy.ServiceProxy('/openai/get_response', Completion)
+
+ # ## integration of dialogflow <-> google_chat_ros was performed by google_chat_ros/script/helper.py
+ # rospy.loginfo("wait for '/dialogflow_client/text_action'")
+ # self.dialogflow_ac = actionlib.SimpleActionClient('/dialogflow_client/text_action' , DialogTextAction)
+ # self.dialogflow_ac.wait_for_server()
+
+ # rospy.loginfo("wait for '/analyze_text/text'")
+ # self.analyze_text_ac = actionlib.SimpleActionClient('/analyze_text/text' , AnalyzeTextAction)
+ # self.analyze_text_ac.wait_for_server()
+
+ rospy.loginfo("subscribe '/google_chat_ros/message_activity'")
+ self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb)
+ self.sas = actionlib.SimpleActionServer('~message', SendMessageAction, self.action_cb, auto_start=False)
+ self.sas.start()
+
+ rospy.loginfo("all done, ready")
+
+ def query_multiple_types(self, types, meta_tuple):
+ "Query mongo messages, returns list of MongoQueryMsgResponse"
+ msgs = MongoQueryMsgResponse()
+ for _type in types:
+ msg = self.query(database = 'jsk_robot_lifelog',
+ collection = self.robot_name,
+ type = _type,
+ single = False,
+ # limit = limit,
+ meta_query = StringPairList(meta_tuple),
+ sort_query = StringPairList([StringPair('_meta.published_at', '-1')]))
+ msgs.messages.extend(msg.messages)
+ msgs.metas.extend(msg.metas)
+ return msgs
+
+ def query_mongo_data(self, types, start_time, end_time):
+        "Query stored messages of the given types between start_time and end_time, returns list of tuple (msg, meta)"
+ rospy.logwarn("Query activities from {} until {}".format(start_time, end_time))
+ rospy.logwarn(" for types {}".format(types))
+ meta_query= {'published_at': {"$lt": end_time, "$gt": start_time}}
+ meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),)
+ mongo_msgs = self.query_multiple_types(types, meta_tuple)
+
+ activities = []
+ for msg, meta in zip(mongo_msgs.messages, mongo_msgs.metas):
+ msg = deserialise_message(msg)
+ meta = json.loads(meta.pairs[0].second)
+ activities.append((msg, meta))
+ rospy.logwarn(" Found {} messages".format(len(activities)))
+ return activities
+
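+    # Results are cached in a pickle for up to an hour, so repeated test runs
+    # do not hit MongoDB every time.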
+ def query_mongo_data_days(self, types=None, days=7):
+        "Query activities for the past week, returns list of list of tuple (msg, meta); days with no activity yield an empty list"
+ if types == None:
+ types = self.query_types
+ # if we found cache file
+ if self.use_activities_cache and (os.path.exists(self.pickle_file) and
+ (datetime.datetime.today() - datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file))).seconds < 1 * 60 * 60): # seconds -> hours
+ rospy.loginfo('Loading cached activities data {}'.format(datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file))))
+ with open(self.pickle_file, 'rb') as f:
+ activities = pickle.load(f)
+            # check if activities start from self.start_date
+ if len(activities) > 0 and len(activities[0]) > 0 and \
+ len(activities[0][0]) > 0 and activities[0][0][1].get('timestamp') :
+ timestamp = datetime.datetime.fromtimestamp(activities[0][0][1]['timestamp']//1000000000, JST)
+ rospy.loginfo(' ... cached data is starting from {}'.format(timestamp))
+ if abs((timestamp - datetime.datetime.combine(self.start_date, datetime.datetime.min.time(), tzinfo=JST)).total_seconds()) < 86400 : # 24 hours
+ rospy.loginfo(' ... using cached activities for {}'.format(self.start_date))
+ return activities
+ else:
+ rospy.logwarn("Cached file({}) is different from start_date({}), loading from mongoDB".format(timestamp, self.start_date))
+
+ activities = []
+ today = self.start_date ## for debug ... -> - datetime.timedelta(hours=24)
+ startdate = datetime.datetime(today.year, today.month, today.day, tzinfo=JST)
+ for days_before in range(days):
+ activities_raw = self.query_mongo_data(types,
+ startdate-datetime.timedelta(hours=days_before*24),
+ startdate-datetime.timedelta(hours=(days_before-1)*24))
+ activities.append(activities_raw)
+
+ # dump msgs
+ if self.use_activities_cache:
+ with open(self.pickle_file, 'wb') as f:
+ pickle.dump(activities, f)
+ f.flush()
+
+ return activities
+
+ def make_state_frequency(self, diary_activities_raw):
+ message = list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' not in x, self.query_types))
+ diary_activities_freq = []
+ for activities_raw in diary_activities_raw:
+ activities_raw_state = [x['state'] for x in [x for x in activities_raw if x['type'] in message]]
+ activities_freq = {key: activities_raw_state.count(key) for key in set(activities_raw_state)}
+ rospy.logwarn("Found {} activity data (make_state_frequency)".format(len(activities_raw)))
+ if len(activities_raw) > 0:
+ rospy.logwarn(" period : {} {}".format(activities_raw[-1]['timestamp'], activities_raw[0]['timestamp']))
+ rospy.logwarn(" freq : {} ({})".format(activities_freq, len(activities_freq)))
+ diary_activities_freq.append(activities_freq)
+ return diary_activities_freq
+
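+    # Collapse raw per-day activity records into per-event statistics: an event
+    # seen again within 30 minutes extends the current interaction, a longer
+    # gap closes it and adds its length to the accumulated 'duration'.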
+ def make_activities_events(self, diary_activities_raw, message = None):
+ if not message:
+ message = list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' not in x, self.query_types))
+ diary_activities_events = []
+ for activities_raw in diary_activities_raw:
+ activities_events = {}
+ for activities in activities_raw:
+ timestamp = activities['timestamp']
+ event = activities['state']
+ if activities['type'] not in message:
+ continue
+
+ if event in activities_events:
+ time_since_last_seen = activities_events[event]['last_seen'] - timestamp
+ if time_since_last_seen.seconds/60 < 30: # min
+ activities_events[event]['tmp_duration'] += time_since_last_seen
+ else:
+ # 'duration' keeps maximum duration
+ # if activities_events[event]['tmp_duration'] > activities_events[event]['duration']:
+ # activities_events[event]['duration'] = activities_events[event]['tmp_duration']
+ # 'duration' keeps accumulated duration
+ activities_events[event]['duration'] += activities_events[event]['tmp_duration']
+ activities_events[event]['tmp_duration'] = datetime.timedelta()
+ activities_events[event]['last_seen'] = timestamp
+ activities_events[event]['count'] += 1
+ else:
+                    activities_events.update({event : {'last_seen' : timestamp, 'tmp_duration' : datetime.timedelta(), 'duration' : datetime.timedelta(seconds=300), 'count': 1}}) # initially assume a 5 min interaction and set the initial count to 1
+ # print("{} {:24} {} {}".format(timestamp, event, activities_events[event]['duration'], activities_events[event]['tmp_duration']))
+ for event in activities_events:
+ activities_events[event]['duration'] += activities_events[event]['tmp_duration']
+ diary_activities_events.append(activities_events)
+ return diary_activities_events
+
+ def make_image_activities(self, diary_activities_raw = None):
+ if not diary_activities_raw:
+ mongo_data_days = self.query_mongo_data_days()
+ diary_activities_raw = self.make_robot_activities_raw(mongo_data_days)
+
+ # create activities event data
+        # activities_events[event_name] = {'last_seen': datetime, 'duration': datetime.timedelta, 'count': int}
+ # diary_activities_events = [activities_events for day1, activities_events for day2, ....]
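+        # e.g. (hypothetical values):
+        #   [{'a dog on the floor': {'last_seen': <datetime>, 'duration': timedelta(seconds=300), 'count': 2}},  # day 1
+        #    {},                                                                                                 # day 2
+        #    ...]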
+        diary_activities_events = self.make_activities_events(diary_activities_raw, ['jsk_recognition_msgs/VQATaskActionResult'])
+
+ image_activities = {}
+ for activities_raw in diary_activities_raw:
+ for activities in activities_raw:
+ if activities['type'] != 'jsk_recognition_msgs/VQATaskActionResult':
+ continue
+ timestamp = activities['timestamp']
+ answer = activities['state']
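+                # keep only descriptions longer than 3 words that are not near-duplicates
+                # (word-set Jaccard similarity >= 0.5) of an already selected description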
+ if len(answer.split()) > 3 and \
+ max([jaccard_similarity(x.lower().split(' '), answer.lower().split(' ')) for x in image_activities.keys()]+[0]) < 0.5:
+ image_activities.update({answer : timestamp})
+ if (len(image_activities)) > 0:
+ break
+ else:
+ rospy.logwarn(" no valid image description is found...")
+ #
+ if len(image_activities) == 0:
+ return {}
+
+ prompt = "Please select the most memorable and illuminating event by number from the list below.\n\n"
+ n = 0
+ for answer, timestamp in image_activities.items():
+ prompt += "{}: {} ({})\n".format(n, answer, timestamp)
+ n += 1
+
+ # Avoid error 'This model's maximum context length is 4097 tokens, however you requested 5464 tokens (4952 in your prompt; 512 for the completion). Please reduce your prompt'
+ no = len(image_activities)
+ if len(prompt) + 512 < 4097:
+ response = self.openai_completion(prompt)
+ n = re.search(r'(\d+)', response)
+ if n:
+ no = int(n.group(1))
+ else:
+ rospy.logerr("too long prompt...")
+
+ if no >= len(image_activities):
+ rospy.loginfo("no is {}, so use random....".format(no))
+ no = random.randrange(len(image_activities))
+
+ answer, timestamp = list(image_activities.items())[no]
+ rospy.loginfo("topic of the day")
+ rospy.loginfo(" answer : {}".format(answer))
+ rospy.loginfo(" timestamp : {}".format(timestamp))
+ results = self.query_images_and_classify(query = answer,
+ start_time = timestamp - datetime.timedelta(minutes=5),
+ end_time = timestamp + datetime.timedelta(minutes=5),
+ classify = False)
+ if not IsHeadless:
+            cv2.imshow('images of today', cv2.hconcat([cv2.imdecode(np.frombuffer(result['image'].data, np.uint8), cv2.IMREAD_COLOR) for result in results]))  # np.fromstring is deprecated
+ cv2.waitKey(100)
+
+
+ if len(results) > 0:
+            # publish as card
+ filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home())
+ self.write_image_with_annotation(filename, results[0], answer)
+ return {'text': answer, 'filename': filename}
+
+ def make_activity(self, diary_activities_raw = None):
+ "Returns activity prompts"
+ # create diary activities_raw
+        # list of (timestamp, event) [[{'timestamp': , 'state':, 'type': }, {'timestamp': , 'state':, 'type': } ...],[#for 2nd day],[#for 3rd day]...]
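+        # e.g. [[{'timestamp': <datetime>, 'state': 'sleeping', 'type': '<stored_type>'}, ...], [...], ...]  (hypothetical values)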
+ if not diary_activities_raw:
+ mongo_data_days = self.query_mongo_data_days()
+ diary_activities_raw = self.make_robot_activities_raw(mongo_data_days)
+
+        # make frequency data for 7 days
+ # activities_freq {'event_1' : count, 'event_2' : count}
+ # diary_activities_freq = [activities_freq for day1, activities_freq for day2, ...]
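+        # e.g. [{'sleeping': 12, 'walking': 3}, {'sleeping': 10}, ...]  (hypothetical events/counts)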
+ diary_activities_freq = self.make_state_frequency(diary_activities_raw)
+
+ # create activities event data
+        # activities_events[event_name] = {'last_seen': datetime, 'duration': datetime.timedelta, 'count': int}
+ # diary_activities_events = [activities_events for day1, activities_events for day2, ....]
+ #diary_activities_events = self.make_activities_events(diary_activities_raw, 'aibo_driver/')
+ diary_activities_events = self.make_activities_events(diary_activities_raw)
+        diary_recognition_events = self.make_activities_events(diary_activities_raw,
+                                                               message=['jsk_recognition_msgs/VQATaskActionResult'])
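+        # merge the per-day robot-state events and image-description events into a single dict per day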
+ diary_activities_events = [{k: v for d in L for k, v in d.items()} for L in zip(diary_activities_events, diary_recognition_events)]
+
+ for activities_events in diary_activities_events:
+ print("--")
+ for event, duration in sorted(activities_events.items(), key=lambda x: x[1]['duration'], reverse=True):
+ print("{:24} : {:4.2f} min ({} times)".format(event, duration['duration'].seconds/60, duration['count']))
+
+ # flatten list
+ activities_events = [x for events in diary_activities_events for x in events.keys()] # get all activities with duplicates
+
+ if len(activities_events) == 0:
+ return ""
+        # percentage of activities that happened
+ prompt = "{}\n\n".format(list(list(filter(None, diary_activities_events))[0].items())[0][1]['last_seen'].strftime("%a %d %b %Y"))
+ prompt += "\n 'action : time'\n"
+
+        # sort activities events by their occurrence [list] -> sorted({key: count})
+ activities_events_freq = sorted({key: activities_events.count(key) for key in set(activities_events)}.items(), key=lambda x:x[1], reverse=True)
+ always_action = False
+ for event, count in activities_events_freq[:10]:
+ if count/float(len(diary_activities_events)) > 0.25:
+ if next((x for x in diary_activities_events if event in x.keys()), None):
+ prompt += "{} : {}\n".format(event, next(x for x in diary_activities_events if event in x.keys())[event]['last_seen'])
+ else:
+ prompt += "{} : {:.2f}\n".format(event, count/float(len(diary_activities_events)))
+ always_action = True
+
+ if not always_action:
+ prompt += "none\n"
+
+        # estimate frequency in 24h
+ prompt += "\n 'action : increase from the number of time done yesterday'\n"
+
+ more_yesterday_action = False
+ diary_activities_events_no_empty = list(filter(None, diary_activities_events))
+ if len(diary_activities_events_no_empty) >= 2:
+ l0 = diary_activities_events_no_empty[0]
+ l1 = diary_activities_events_no_empty[1]
+ for event in set(activities_events):
+ if event in l0 and event in l1:
+ increase = l0[event]['count'] - l1[event]['count']
+ if increase > 0:
+ prompt += "{} : +{}\n".format(event, increase)
+ more_yesterday_action = True
+ if not more_yesterday_action:
+ prompt += "none\n"
+
+ #
+ prompt += "\n 'action : number of days passed since you last did it'\n"
+ long_time_action = False
+ # if we have more than 10 activities, remove lowest events
+        for event in [x[0] for x in sorted(diary_activities_events[0].items(), key=lambda x: x[1]['duration'].total_seconds(), reverse=True)[:10]] \
+            if len(diary_activities_events[0].keys()) > 10 else diary_activities_events[0].keys():
+ n = 1
+ for diary_activities_event in diary_activities_events[1:]:
+ if event not in diary_activities_event.keys() or diary_activities_event[event]['duration'].seconds < 1:
+ n += 1
+ else:
+ break
+ if n >= 2:
+ prompt += "{} : {}\n".format(event, n)
+ long_time_action = True
+ if not long_time_action:
+ prompt += "none\n"
+
+ rospy.logdebug(prompt)
+ return prompt
+
+ def make_robot_activities_raw(self, mongo_data_days = None):
+ "Create robot activities for several days, returns list of list of tuple(temestamp, event)"
+ # list of list of tuples (msg, meta) [[(msg, meta), (msg, meta),...],[#for 2nd day], [#for 3rd day]]
+ if not mongo_data_days:
+ mongo_data_days = self.query_mongo_data_days()
+ diary_activities_raw = [] ## (timestamp, event)
+ for mongo_data in mongo_data_days:
+ rospy.loginfo("Found {} mongo data (make_robot_activities_raw)".format(len(mongo_data)))
+ rospy.loginfo(" types : {}".format(list(set([x[1]['stored_type'] for x in mongo_data]))))
+ activities_raw = []
+ input_topics = []
+ for msg, meta in mongo_data:
+ state = []
+ timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)
+ input_topics.append(meta['input_topic'])
+ if meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult':
+ #rospy.logwarn("{} {}".format(timestamp, msg.result.result.result))
+ if len(msg.result.result.result) > 0:
+ answer = msg.result.result.result[0].answer
+
+ if any([term in answer for term in ['Silhouetted intrigue', '#Robot', 'glimpse', 'cable', 'Focus']]):
+ rospy.logwarn("skip JUST FOR DEMO : {}".format(answer))
+ continue
+
+ if len(answer.split()) > 3:
+ rospy.logwarn("{} {}".format(timestamp, answer))
+ state = [answer]
+ else:
+ rospy.logwarn("Unknown stored type: {}".format(meta['stored_type']))
+ continue
+ # create activities_raw
+ for s in state:
+ activities_raw.append({'timestamp': timestamp, 'state': s, 'type': meta['stored_type']})
+
+ diary_activities_raw.append(activities_raw)
+
+ if len(activities_raw) > 0:
+ rospy.loginfo(" period : {} {}".format(activities_raw[-1]['timestamp'], activities_raw[0]['timestamp']))
+ rospy.loginfo(" topics : {}".format({key: input_topics.count(key) for key in set(input_topics)}))
+ ##
+ return diary_activities_raw ## (timestamp, event)
+
+ def make_diary(self, language="Japanese"):
+ "make dirary"
+ # get mongo data for 7 days
+ mongo_data_days = self.query_mongo_data_days()
+ diary_activities_raw = self.make_robot_activities_raw(mongo_data_days)
+ # get most impressive image and text
+ topic_of_day = None
+ filename = False
+
+ image_activity = self.make_image_activities(diary_activities_raw)
+ if image_activity:
+ topic_of_day = image_activity['text']
+ filename = image_activity['filename']
+
+ # create prompt
+ if self.prompt_type == 'personality':
+ # from Ichikura's comment on 2024/Jan/23
+ prompt = "\nYou are a pet robot, {robot_type}. Your name is '{robot_name}.'\n{personality}\nPlease write a brief diary of today from the data. Note, however, that you are a baby robot, so please write today's diary as simply and childishly as possible.\n\n\n".format(robot_name=self.robot_name, robot_type=self.robot_type, personality=self.personality)
+ else:
+ prompt = "You are a baby robot. You were taken care of by people around you.\n\n"
+
+ if topic_of_day:
+ prompt += "Today, you are impressed by " + topic_of_day + "."
+ prompt += "The following data is a record of today's actions regarding what we always do, what we did more than yesterday, and what happened after a long time. Please write a brief diary from the data. Note, however, that you are a baby robot, so please make it a child-like diary. Add your name and date in a diary\n\n"
+
+ prompt += self.make_activity(diary_activities_raw)
+
+ response = self.openai_completion(prompt)
+ rospy.loginfo("prompt = {}".format(prompt))
+ rospy.loginfo("response = {}".format(response))
+
+ prompt = "Please rewrite the following diary in {language}. Write as childlike as you can. Write around 360 {language} charactors.\n\n".format(language = language) + response
+ # prompt = "Please rewrite the following diary as childlike as you can. Write a maximum 120 {} charactors.\n\n".format(language) + response
+ response_short = self.openai_completion(prompt)
+ rospy.loginfo("prompt = {}".format(prompt))
+ rospy.loginfo("response = {}".format(response_short))
+ if len(response_short) > 64:
+ response = response_short
+ else:
+ rospy.logerr("response is too short ({} chars), use original version".format(len(response_short)))
+
+ response = {'text': response}
+ if filename:
+ response.update({'filename': filename})
+
+ return response
+
+ def make_image_activities_raw(self, diary_activities_raw = None):
+ '''
+        returns {answer1: timestamp1, answer2: timestamp2, ...}
+ '''
+ if not diary_activities_raw:
+ mongo_data_days = self.query_mongo_data_days()
+ diary_activities_raw = self.make_robot_activities_raw(mongo_data_days)
+
+ # create activities event data
+        # activities_events[event_name] = {'last_seen': datetime, 'duration': datetime.timedelta, 'count': int}
+ # diary_activities_events = [activities_events for day1, activities_events for day2, ....]
+        diary_activities_events = self.make_activities_events(diary_activities_raw, ['jsk_recognition_msgs/VQATaskActionResult'])  # NOTE: result currently unused here
+
+ image_activities_raw = {}
+ for activities_raw in diary_activities_raw:
+ for activities in activities_raw:
+ if activities['type'] != 'jsk_recognition_msgs/VQATaskActionResult':
+ continue
+ timestamp = activities['timestamp']
+ answer = activities['state']
+ image_activities_raw.update({answer: timestamp})
+ return image_activities_raw
+
+ def make_response(self, text, language="Japanese"):
+ # translate to english
+ if language=="Japanese":
+ text = self.openai_completion('Translate the following sentences to English\n\n{}'.format(text))
+        # choose related images
+ image_activities = self.make_image_activities_raw()
+ prompt = "From the list below, please select the one that best relates to the sentense '{}'. Please use number in your answer\n\n".format(text)
+
+ n = 0
+ for answer, timestamp in image_activities.items():
+ prompt += "{}: {} ({})\n".format(n, answer, timestamp)
+ n += 1
+ rospy.logerr(prompt)
+
+ # ask question
+ response = self.openai_completion(prompt)
+ n = re.search(r'(\d+)', response)
+ answer, timestamp = None, None
+ if n:
+ no = int(n.group(1))
+ if no >= 0 and no < len(image_activities):
+ image_activity = list(image_activities.items())[no]
+ answer, timestamp = image_activity
+ rospy.loginfo("Choose {} : {} as corresponging memory ({})".format(no, answer, timestamp))
+
+
+ # create response
+ prompt = "\nYou are a baby robot. You were taken care of by people around you.\n\n\n"
+
+ prompt += "If your friend tells you '{text}', ".format(text=text)
+ if answer:
+ prompt += "and you remembered that you feel '{answer}' at that moment. ".format(answer=answer)
+ prompt += "What would you reply? Show only the reply.\n\n"
+ ##prompt += self.make_activity()
+
+ response = self.openai_completion(prompt)
+ rospy.loginfo("prompt = {}".format(prompt))
+ rospy.loginfo("response = {}".format(response))
+
+ prompt = "Please rewrite the following response in {language}. Write as childlike as you can. Write around 140 {language} charactors.\n\n".format(language = language) + response
+
+ response = self.openai_completion(prompt)
+ rospy.loginfo("prompt = {}".format(prompt))
+ rospy.loginfo("response = {}".format(response))
+
+        if timestamp is None or '元気' in text:  # '元気' ~ "how are you / are you well"
+ rospy.logerr("Use latest images")
+ start_time = datetime.datetime.now(JST) + datetime.timedelta(minutes=-60)
+ end_time = datetime.datetime.now(JST)
+ else:
+ start_time = timestamp - datetime.timedelta(minutes=5)
+ end_time = timestamp + datetime.timedelta(minutes=5)
+
+ results = self.query_images_and_classify(query = "....",
+ start_time = start_time,
+ end_time = end_time,
+ classify = False)
+ if len(results) > 0:
+ if not IsHeadless: # debug
+ try:
+                    concat_images = cv2.hconcat([cv2.imdecode(np.frombuffer(result['image'].data, np.uint8), cv2.IMREAD_COLOR) for result in results])  # np.fromstring is deprecated
+ filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home())
+ cv2.imwrite(filename, concat_images)
+ rospy.logwarn("save all images to {}".format(filename))
+ cv2.imshow('images for response', concat_images)
+ cv2.waitKey(100)
+                except Exception as e:
+                    rospy.logerr(e)
+ # select closest image for response
+            # publish as card
+ if timestamp:
+ results.sort(key=lambda x: abs((x['timestamp'] - timestamp).total_seconds()))
+ filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home())
+ self.write_image_with_annotation(filename, results[0], "")
+ return {'text': response, 'filename': filename}
+
+ return {'text': response}
+
+ def make_reply(self, message, lang="en", startdate=datetime.datetime.now(JST)-datetime.timedelta(hours=24), duration=datetime.timedelta(hours=24) ):
+ enddate = startdate+duration
+ rospy.logwarn("Run make_reply({} from {} to {})".format(message, startdate, enddate))
+ query = self.text_to_salience(message)
+ rospy.logwarn("query using salience word '{}'".format(query))
+ # look for images
+ try:
+ # get chat message
+ results, chat_msgs = self.query_dialogflow(query, startdate, enddate, threshold=0.25)
+ # retry = 0
+ # while retry < 3 and len(results) == 0 and len(chat_msgs.metas) > 0:
+ # meta = json.loads(chat_msgs.metas[-1].pairs[0].second)
+ # results, chat_msgs = self.query_dialogflow(query, datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST))
+ # retry = retry + 1
+ # sort based on similarity with 'query'
+ chat_msgs_sorted = sorted(results, key=lambda x: x['similarity'], reverse=True)
+
+ if len(chat_msgs_sorted) == 0:
+ rospy.logwarn("no chat message was found")
+ else:
+ # query images that was taken when chat_msgs are stored
+ msg = chat_msgs_sorted[0]['msg']
+ meta = chat_msgs_sorted[0]['meta']
+ text = chat_msgs_sorted[0]['message']
+ startdate = chat_msgs_sorted[0]['timestamp']
+ action = chat_msgs_sorted[0]['action']
+ similarity = chat_msgs_sorted[0]['similarity']
+ # query chat to get response
+ #meta = json.loads(chat_msgs_sorted[0]['meta'].pairs[0].second)
+ # text = msg.message.argument_text or msg.message.text
+ # startdate = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)
+ rospy.loginfo("Found message '{}'({}) at {}, corresponds to query '{}' with {:2f}%".format(text, action, startdate.strftime('%Y-%m-%d %H:%M:%S'), query, similarity))
+
+ # query images when chat was received
+ start_time = startdate # startdate is updated with found chat space
+ end_time = enddate # enddate is not modified within this function, it is given from chat
+ results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time)
+
+ # no images found
+ if len(results) == 0:
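+                # reply "I have no memory 🤯" in Japanese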
+ return {'text': '記憶がありません🤯'}
+
+ end_time = results[-1]['timestamp']
+
+ # sort
+ results = sorted(results, key=lambda x: x['similarities'], reverse=True)
+ rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), x['similarities']), results))))
+ best_result = results[0]
+
+ '''
+ # if probability is too low, try again
+ while len(results) > 0 and results[0]['similarities'] < 0.25:
+
+ start_time = end_time-datetime.timedelta(hours=24)
+ timestamp = datetime.datetime.now(JST)
+ results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time, limit=300)
+ if len(results) > 0:
+ end_time = results[-1]['timestamp']
+ # sort
+ results = sorted(results, key=lambda x: x['similarities'], reverse=True)
+ #rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['label'], x['similarities']), results))))
+ if len(results) > 0 and results[0]['similarities'] > best_result['similarities']:
+ best_result = results[0]
+
+ rospy.loginfo("Found '{}' image with {:0.2f} % simiarity at {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')))
+ '''
+
+ ## make prompt
+ reaction = self.describe_image_scene(best_result['image'])
+ if len(chat_msgs_sorted) > 0 and chat_msgs_sorted[0]['action'] and 'action' in chat_msgs_sorted[0]:
+ reaction += " and you felt " + chat_msgs_sorted[0]['action']
+ rospy.loginfo("reaction = {}".format(reaction))
+
+ # make prompt
+ prompt = 'if you are a pet and someone tells you \"' + message + '\" when we went together, ' + \
+ 'and ' + reaction + ' in your memory of that moment, what would you reply? '+ \
+ 'Show only the reply in {lang}'.format(lang={'en': 'English', 'ja':'Japanese'}[lang])
+ loop = 0
+ result = None
+ while loop < 3 and result is None:
+ try:
+ result = self.completion(prompt=prompt,temperature=0)
+ except rospy.ServiceException as e:
+ rospy.logerr("Service call failed: %s"%e)
+ result = None
+ loop += 1
+            if result is None:
+                raise Exception('[ERROR] completion failed for prompt {}'.format(prompt))
+            result.text = result.text.lstrip()  # keep str; encoding to bytes breaks downstream formatting on Python 3
+ rospy.loginfo("prompt = {}".format(prompt))
+ rospy.loginfo("result = {}".format(result))
+ # pubish as card
+ filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home())
+ self.write_image_with_annotation(filename, best_result, prompt)
+ return {'text': result.text, 'filename': filename}
+
+ except Exception as e:
+ raise ValueError("Query failed {} {}".format(e, traceback.format_exc()))
+
+
+ def openai_completion(self, prompt, temperature=0):
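+        # retry up to 5 times; empty completions and transient service failures are
+        # retried after a 2 second sleep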
+ loop = 0
+ result = None
+ while loop < 5 and result is None:
+ try:
+ result = self.completion(prompt=prompt,temperature=temperature)
+ if result.text == '':
+ rospy.logwarn(result)
+ rospy.logwarn("result text is too short, retry completion")
+ rospy.sleep(2)
+ result = None
+ except rospy.ServiceException as e:
+ rospy.logerr("Service call failed: %s"%e)
+ rospy.sleep(2)
+ result = None
+ loop += 1
+ if result is None:
+            raise Exception('[ERROR] openai_completion failed to complete {}'.format(prompt))
+ result.text = result.text.lstrip()
+ rospy.logdebug("prompt = {}".format(prompt))
+ rospy.logdebug("result = {}".format(result))
+ return result.text
+
+ def write_image_with_annotation(self, filename, best_result, prompt):
+ image = bridge.compressed_imgmsg_to_cv2(best_result['image'])
+ _, width, _ = image.shape
+ scale = width/640.0
+ if 'label' in best_result and 'similarities' in best_result:
+ cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')),
+ (10,int(20*scale)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (255,255,255), 8, 1)
+ cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')),
+ (10,int(20*scale)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (0,0,0), 2, 1)
+ string_width = 70
+ for i in range(0, len(prompt), string_width): # https://stackoverflow.com/questions/13673060/split-string-into-strings-by-length
+ text = prompt[i:i+string_width]
+ cv2.putText(image, text, (10,int(43*scale)+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (255,255,255), 4, 1)
+ cv2.putText(image, text, (10,int(43*scale)+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (0,0,0), 1, 1)
+ cv2.imwrite(filename, image)
+ rospy.logwarn("save images to {}".format(filename))
+
+
+ def query_dialogflow(self, query, start_time, end_time, limit=30, threshold=0.0):
+ rospy.logwarn("Query dialogflow from {} until {}".format(start_time, end_time))
+ meta_query= {'published_at': {"$lt": end_time, "$gt": start_time}}
+ meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),)
+ chat_msgs = self.query(database = 'jsk_robot_lifelog',
+ collection = self.robot_name,
+ # type = 'google_chat_ros/MessageEvent',
+ type = 'dialogflow_task_executive/DialogTextActionResult',
+ single = False,
+ # limit = limit,
+ meta_query = StringPairList(meta_tuple),
+ sort_query = StringPairList([StringPair('_meta.published_at', '-1')]))
+
+ # optimization... send translate once
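+        # (join all queries into one newline-separated string, translate once, then
+        #  split the translation back per message)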
+ messages = ''
+ for msg, meta in zip(chat_msgs.messages, chat_msgs.metas):
+ msg = deserialise_message(msg)
+ message = msg.result.response.query.replace('\n','')
+ messages += message + '\n'
+ messages = self.translate(messages, dest="en").text.split('\n')
+
+ # show chats
+ results = []
+ for msg, meta in zip(chat_msgs.messages, chat_msgs.metas):
+ msg = deserialise_message(msg)
+ meta = json.loads(meta.pairs[0].second)
+ timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)
+ # message = msg.message.argument_text or msg.message.text
+ message = msg.result.response.query
+ #message_translate = self.translate(message, dest="en").text
+ message_translate = messages.pop(0).strip()
+ result = {'message': message,
+ 'message_translate': message_translate,
+ 'timestamp': timestamp,
+ 'similarity': difflib.SequenceMatcher(None, query, message_translate).ratio(),
+ 'action': msg.result.response.action,
+ 'msg': msg,
+ 'meta': meta}
+ if msg.result.response.action in ['make_reply', 'input.unknown']:
+ rospy.logwarn("Found dialogflow messages {}({}) at {} but skipping (action:{})".format(result['message'], result['message_translate'], result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), msg.result.response.action))
+ else:
+ rospy.loginfo("Found dialogflow messages {}({}) ({}) at {} ({}:{:.2f})".format(result['message'], result['message_translate'], msg.result.response.action, result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), query, result['similarity']))
+ if ( result['similarity'] > threshold):
+ results.append(result)
+ else:
+ rospy.logwarn(" ... skipping (threshold: {:.2f})".format(threshold))
+
+
+ return results, chat_msgs
+
+
+ def query_images_and_classify(self, query, start_time, end_time, limit=10, classify=True):
+ rospy.logwarn("Query images from {} to {}".format(start_time, end_time))
+ meta_query= {#'input_topic': '/spot/camera/hand_color/image/compressed/throttled',
+ 'published_at': {"$gt": start_time, "$lt": end_time}}
+ meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),)
+ msgs = self.query(database = 'jsk_robot_lifelog',
+ collection = self.robot_name,
+ type = 'sensor_msgs/CompressedImage',
+ single = False,
+ limit = limit,
+ meta_query = StringPairList(meta_tuple),
+ sort_query = StringPairList([StringPair('_meta.published_at', '-1')]))
+
+ rospy.loginfo("Found {} images".format(len(msgs.messages)))
+ if len(msgs.messages) == 0:
+ rospy.logwarn("no images was found")
+
+ # get contents of images
+ results = []
+ for msg, meta in zip(msgs.messages, msgs.metas):
+ meta = json.loads(meta.pairs[0].second)
+ timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)
+ rospy.logwarn(" Found images at {}".format(timestamp))
+
+ result = {'query' : query, 'image' : deserialise_message(msg), 'timestamp': timestamp}
+ if classify:
+ goal = ClassificationTaskGoal()
+ goal.compressed_image = result['image']
+ goal.queries = [query]
+ self.classification_ac.send_goal(goal)
+ self.classification_ac.wait_for_result()
+                classification = self.classification_ac.get_result()  # use a separate name so the per-image result dict is not clobbered
+                idx = classification.result.label_names.index(query)
+                #similarities = classification.result.probabilities
+                similarities = classification.result.label_proba
+                # rospy.logwarn("    ... {}".format(list(zip(classification.result.label_names, map(lambda x: "{:.2f}".format(x), similarities)))))
+                rospy.logwarn("Found images at {} .. {}".format(timestamp, list(zip(classification.result.label_names, map(lambda x: "{:.4f}".format(x), similarities)))))
+                result.update({'label': classification.result.label_names[idx], 'probabilities': classification.result.probabilities[idx], 'similarities': classification.result.label_proba[idx]})
+ results.append(result)
+
+        # we do not sort by probabilities, because we also need the oldest timestamp
+ return results
+
+ def describe_image_scene(self, image):
+ goal = VQATaskGoal()
+ goal.compressed_image = image
+
+ # unusual objects
+ if random.randint(0,1) == 1:
+ goal.questions = ['what unusual things can be seen?']
+ reaction = 'you saw '
+ else:
+ goal.questions = ['what is the atmosphere of this place?']
+ reaction = 'the atmosphere of the scene was '
+
+ # get vqa result
+ self.vqa_ac.send_goal(goal)
+ self.vqa_ac.wait_for_result()
+ result = self.vqa_ac.get_result()
+ reaction += result.result.result[0].answer
+ return reaction
+
+ def publish_google_chat_card(self, text, space, filename=None):
+ goal = SendMessageGoal()
+ goal.text = text
+ if filename:
+ goal.cards = [Card(sections=[Section(widgets=[WidgetMarkup(image=Image(localpath=filename))])])]
+ goal.space = space
+ rospy.logwarn("send {} to {}".format(goal.text, goal.space))
+ ret = self.chat_ros_ac.send_goal_and_wait(goal, execute_timeout=rospy.Duration(0.10))
+ result = self.chat_ros_ac.get_result()
+        if result is None or not result.done:
+ rospy.logerr("publish_google_chat_card: failed to send message, send_goal_and_wait({}), result.done({})".format(ret, result.done))
+ return False
+ return True
+
+ def text_to_salience(self, text):
+ goal = AnalyzeTextGoal()
+        goal.text = text
+ self.analyze_text_ac.send_goal(goal)
+ self.analyze_text_ac.wait_for_result()
+ entity = self.analyze_text_ac.get_result()
+ if len(entity.entities) > 0:
+ return entity.entities[0].name
+ else:
+ return text
+
+ def translate(self, text, dest):
+ global translator
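+        # translator sessions can go stale; recreate the client and retry up to 3 times,
+        # falling back to returning the original text untranslated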
+ loop = 3
+ while loop > 0:
+ try:
+ ret = translator.translate(text, dest="en")
+ return ret
+ except Exception as e:
+ rospy.logwarn("Faile to translate {}".format(e))
+ time.sleep(1)
+ translator = Translator()
+ loop = loop - 1
+ return Translated(text=text, dest=dest)
+
+
+ def cb(self, msg):
+ ac_ret = False
+ space = 'spaces/AAAAoTwLBL0' ## default space JskRobotBot
+ if msg._type == 'google_chat_ros/MessageEvent':
+ text = msg.message.argument_text.lstrip() or msg.message.text.lstrip()
+ space = msg.space.name
+ rospy.logwarn("Received chat message '{}' on {}".format(text, datetime.datetime.now(JST).strftime('%Y-%m-%d %H:%M:%S')))
+ else:
+ rospy.logerr("Unknown message type {} on {}".format(msg._type, datetime.datetime.now(JST).strftime('%Y-%m-%d %H:%M:%S')))
+ return False
+
+        # on every incoming message, reset start_date to today
+ self.start_date=datetime.date.today()
+ try:
+ language = 'English' if is_ascii(text) else 'Japanese'
+ if any(x in text for x in ['diary', '日記']):
+ self.publish_google_chat_card("Sure!", space)
+ # check if text contains 'date'
+ try:
+                    if language != 'English':
+                        date_text = self.openai_completion('Translate the following sentences to English\n\n{}'.format(text))
+                    else:
+                        date_text = text
+                    date_string = self.openai_completion('If "{}" contains date information, please return with "%Y-%m-%d" format. Note today is {}'.format(date_text, self.start_date.strftime('%Y-%m-%d %H:%M:%S')))
+ self.start_date = datetime.datetime.strptime(re.search(r'\d\d\d\d-\d\d-\d\d', date_string)[0], '%Y-%m-%d')
+ # remove cache #### FIXME
+ self.use_activities_cache = False
+ except Exception as e:
+ rospy.logwarn("No date information included in '{}' ({})".format(text, e))
+
+ ret = self.make_diary(language)
+ if 'filename' in ret:
+ # upload text first, then upload images
+ self.publish_google_chat_card(ret['text'], space)
+ self.publish_google_chat_card('', space, ret['filename'])
+ else:
+ self.publish_google_chat_card(ret['text'], space)
+ else:
+ ret = self.make_response(text, language)
+ if msg.message.sender.name:
+ response = "<{}>\n".format(msg.message.sender.name) + ret['text']
+ else:
+ response = ret['text']
+ self.publish_google_chat_card(response, space)
+ if 'filename' in ret:
+ self.publish_google_chat_card('', space, ret['filename'])
+
+
+ except Exception as e:
+ rospy.logerr("Callback failed {} {}".format(e, traceback.format_exc()))
+ self.publish_google_chat_card("💀 {}".format(e), space)
+ return True
+
+
+ def action_cb(self, goal):
+ msg = MessageEvent()
+ msg.message.text = goal.text
+ msg.message.argument_text = goal.text
+ msg.space.name = goal.space
+ self.cb(msg)
+        ret = self.chat_ros_ac.wait_for_result(rospy.Duration(5.0))
+        result = self.chat_ros_ac.get_result()
+        rospy.logwarn("action_cb: wait_for_result({}), result({})".format(ret, result))
+        if ret and result and result.done:
+ self.sas.set_succeeded(SendMessageResult(done=True))
+ else:
+ self.sas.set_aborted()
diff --git a/database_talker/test_db/.keepme b/database_talker/test_db/.keepme
new file mode 100644
index 0000000000..e69de29bb2