Merge pull request #92 from AndrewHUNGNguyen/develop

Get Praises API Addition and Modification

gregv authored Oct 2, 2024
2 parents 73ce95f + 8827bb5 commit cb19096
Showing 3 changed files with 79 additions and 11 deletions.
32 changes: 28 additions & 4 deletions api/messages/messages_service.py
@@ -1,6 +1,6 @@
from common.utils import safe_get_env_var
from common.utils.slack import send_slack_audit, create_slack_channel, send_slack, invite_user_to_channel
from common.utils.firebase import get_hackathon_by_event_id, upsert_news, upsert_praise, get_github_contributions_for_user,get_volunteer_from_db_by_event
from common.utils.firebase import get_hackathon_by_event_id, upsert_news, upsert_praise, get_github_contributions_for_user, get_volunteer_from_db_by_event, get_recent_praises, get_praises_by_user_id
from common.utils.openai_api import generate_and_save_image_to_cdn
from common.utils.github import create_github_repo
from api.messages.message import Message
@@ -1218,7 +1218,7 @@ def save_news(json):

def save_praise(json):
logger.debug(f"Attempting to save the praise with the json object {json}")
# Take in Slack message and summarize it using GPT-3.5

# Make sure these fields exist: praise_receiver, praise_channel, praise_message
check_fields = ["praise_receiver", "praise_channel", "praise_message"]
for field in check_fields:
@@ -1231,11 +1231,35 @@ def save_praise(json):

logger.info("Updated praise successfully")

#get_news.cache_clear()
#logger.info("Cleared cache for get_news")
get_praises_about_user.cache_clear()
logger.info("Cleared cache for get_praises_about_user")

get_all_praises.cache_clear()
logger.info("Cleared cache for get_all_praises")

return Message("Saved praise")


@cached(cache=TTLCache(maxsize=100, ttl=600))
def get_all_praises():

# Get the MAX_PRAISES most recent praises across all users
results = get_recent_praises()

logger.info(f"Here are the 20 most recently written praises: {results}")
return Message(results)

@cached(cache=TTLCache(maxsize=100, ttl=600))
def get_praises_about_user(user_id):

# Get the praises about user with user_id
results = get_praises_by_user_id(user_id)

logger.info(f"Here are all praises related to {user_id}: {results}")
return Message(results)

# -------------------- Praises methods end here --------------------------- #

async def save_lead(json):
token = json["token"]

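The write path above pairs TTL-cached reads with explicit invalidation: save_praise calls cache_clear() on both cached read functions so a newly saved praise becomes visible before the 10-minute TTL expires. A minimal sketch of that pattern, assuming cachetools 5.x (which exposes cache_clear() on the wrapped function); the names below are illustrative, not from the repo:

from cachetools import cached, TTLCache

@cached(cache=TTLCache(maxsize=100, ttl=600))  # cache entries expire after 10 minutes
def get_all_praises_sketch():
    # stand-in for the Firestore read performed by get_recent_praises()
    return ["praise-1", "praise-2"]

def save_praise_sketch(praise):
    # ... persist the praise to the datastore ...
    # clear the read cache so the next GET reflects the new praise immediately
    get_all_praises_sketch.cache_clear()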
19 changes: 15 additions & 4 deletions api/messages/messages_views.py
@@ -45,6 +45,8 @@
get_npo_applications,
update_npo_application,
get_github_profile,
get_all_praises,
get_praises_about_user,
save_praise,
save_feedback,
get_user_feedback,
@@ -302,18 +304,27 @@ def store_praise():
# else return 401

token = request.headers.get("X-Api-Key")
sender_id = request.get_json().get("praise_sender")
receiver_id = request.get_json().get("praise_receiver")

# Check BACKEND_PRAISE_TOKEN
if token is None or token != os.getenv("BACKEND_PRAISE_TOKEN"):
return "Unauthorized", 401
elif sender_id == receiver_id:
return "You cannot write a praise about yourself", 400
else:
logger.debug(f"Here is the request object {request.get_json()}")
# try:
# logger.debug(f"Here is the request object: {request.get_json()}")
# except Exception as e:
# logger.error(f"Error logging request object: {e}")
return vars(save_praise(request.get_json()))

@bp.route("/praises", methods=["GET"])
def get_praises():
# return the most recent praises across all users
return vars(get_all_praises())

@bp.route("/praise/<user_id>", methods=["GET"])
def get_praises_about_self(user_id):
# return all praise data about user with user_id in route
return vars(get_praises_about_user(user_id))
# -------------------- Praises routes end here --------------------------- #

# -------------------- Problem Statement routes to be deleted --------------------------- #
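For context, the two new GET routes can be exercised as below; the host, port, and the POST path for store_praise (whose route decorator sits above the visible hunk) are assumptions:

import requests

BASE = "http://localhost:5000"  # assumption: local dev server; adjust to your deployment

# 20 most recent praises across all users
print(requests.get(f"{BASE}/praises").json())

# up to 50 most recent praises about one user
print(requests.get(f"{BASE}/praise/some-user-id").json())

# writes are gated by X-Api-Key, which must match BACKEND_PRAISE_TOKEN;
# self-praise (sender == receiver) is rejected with a 400
resp = requests.post(
    f"{BASE}/praise",  # assumption: store_praise is bound to POST /praise
    headers={"X-Api-Key": "replace-with-backend-praise-token"},
    json={
        "praise_sender": "user-a",
        "praise_receiver": "user-b",
        "praise_channel": "#general",
        "praise_message": "Great work on the release!",
    },
)
print(resp.status_code, resp.text)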
39 changes: 36 additions & 3 deletions common/utils/firebase.py
@@ -22,6 +22,9 @@
# set log level
logger.setLevel(logging.DEBUG)

# Declare constants here
MAX_PRAISES_ABOUT_USER = 50
MAX_PRAISES = 20

def get_db():
if safe_get_env_var("ENVIRONMENT") == "test":
@@ -1079,9 +1082,9 @@ def get_volunteer_from_db_by_event(event_id: str, volunteer_type: str) -> dict:
if not event_id:
logger.warning(f"get {volunteer_type}s end (no event_id provided)")
return {"data": []}

db = get_db()

try:
# Use FieldFilter for more explicit and type-safe queries
query = db.collection("volunteers").where(
@@ -1105,4 +1108,34 @@ def get_volunteer_from_db_by_event(event_id: str, volunteer_type: str) -> dict:

except Exception as e:
logger.error(f"Error retrieving {volunteer_type}s: {str(e)}")
return {"data": [], "error": str(e)}

def get_recent_praises():
# Get the MAX_PRAISES most recent praises, sorted by timestamp (newest first)
db = get_db()
praises = db.collection('praises').order_by("timestamp", direction=firestore.Query.DESCENDING).limit(MAX_PRAISES).stream()

# convert each document to a python dictionary
praise_list = []
for doc in praises:
doc_json = doc.to_dict()
doc_json["id"] = doc.id
praise_list.append(doc_json)

# return the praise_list sorted in descending order by timestamp
return praise_list

def get_praises_by_user_id(user_id):
# Get the MAX_PRAISES_ABOUT_USER most recent praises about the user with user_id
db = get_db()
praises = db.collection('praises').where("praise_receiver", "==", user_id).order_by("timestamp", direction=firestore.Query.DESCENDING).limit(MAX_PRAISES_ABOUT_USER).stream()

# convert each document to a python dictionary
praise_list = []
for doc in praises:
doc_json = doc.to_dict()
doc_json["id"] = doc.id
praise_list.append(doc_json)

# return the praise_list sorted in descending order by timestamp
return praise_list
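One operational note on get_praises_by_user_id: Firestore queries that combine an equality filter on one field (praise_receiver) with order_by on another (timestamp) require a composite index, and the first execution fails with an error that links to the index-creation page. A standalone sketch of the same query pattern, assuming google-cloud-firestore with default credentials:

from google.cloud import firestore

db = firestore.Client()  # assumption: credentials supplied via the environment
query = (
    db.collection("praises")
    .where("praise_receiver", "==", "some-user-id")
    .order_by("timestamp", direction=firestore.Query.DESCENDING)
    .limit(50)
)
for doc in query.stream():
    print(doc.id, doc.to_dict())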
