Refactor: Make use of get_default_text_generator instead of llm
SverreNystad committed Sep 19, 2023
1 parent 560f0f8 commit 6b1c8d3
Showing 1 changed file with 2 additions and 8 deletions.
10 changes: 2 additions & 8 deletions src/referee.py
@@ -1,8 +1,5 @@
 from enum import Enum
-
-from langchain import OpenAI
-
-from src.text_generation.config import GPTConfig
+from src.text_generation.text_generator import get_default_text_generator
 
 class Difficulty(Enum):
     """The difficulty of a action or task."""
@@ -21,14 +18,11 @@ class Difficulty(Enum):
     NEARLY_IMPOSSIBLE = 1.0
     """A task of this difficulty is almost impossible for adventurers of an appropriate skill."""
 
-api_key = GPTConfig.API_KEY
-llm: OpenAI = OpenAI(openai_api_key=api_key) if api_key is not None else None
-
 def decide_difficulty(context: str) -> float:
     """Decide the difficulty of the challenge based on the context."""
 
     prompt = get_difficulty_template(context)
-    raw_difficulty = llm.predict(prompt)
+    raw_difficulty = get_default_text_generator().predict(prompt)
     print(raw_difficulty)
     difficulty = float(raw_difficulty)
     return difficulty
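The factory referenced by the new import lives in src/text_generation/text_generator.py, which is not touched by this commit. Below is a minimal sketch of what it presumably wraps, assuming it simply absorbs the module-level setup deleted from referee.py; the function name and module path come from the import line, while everything else is an assumption rather than the repository's actual implementation.

# Hypothetical sketch of src/text_generation/text_generator.py.
# Not part of this commit: it only mirrors the setup removed from referee.py.
from langchain import OpenAI

from src.text_generation.config import GPTConfig


def get_default_text_generator() -> OpenAI | None:
    """Return the default LLM client, or None when no API key is configured."""
    api_key = GPTConfig.API_KEY  # assumed to still be sourced from GPTConfig
    return OpenAI(openai_api_key=api_key) if api_key is not None else None

Under this sketch, decide_difficulty would still fail when no API key is configured, since get_default_text_generator() would return None and the .predict call would raise; the commit moves where the client is built, it does not change that behaviour.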
