EXPERIMENT CODE UPDATED (#98)
## PR Checklist
Please check if your PR fulfills the following requirements:

- [ ] The commit message follows our convention
- [ ] Tests for the changes have been added (for bug fixes / features)


## PR Type
What kind of change does this PR introduce?

<!-- Please check the one that applies to this PR using "x". -->

- [ ] Feature
- [ ] Bugfix
- [ ] Refactor (no functional changes, no api changes)
- [ ] Design changes
- [ ] Comment added
- [ ] Code style update (formatting, local variables)
- [ ] Test code added
- [ ] Chore (other changes)
- [ ] Init
- [ ] Rename
- [ ] Remove
- [ ] Other... Please describe:


## What is the current behavior?
<!-- Please describe the current behavior that you are modifying, or
link to a relevant issue. -->

Issue Number: N/A


## What is the new behavior?


## Does this PR introduce a breaking change?
<!-- Is there a breaking change? -->
- [ ] Yes
- [ ] No


<!-- If this PR contains a breaking change, please describe the impact
and migration path for existing applications below. -->


## Other information
awrgawa authored Nov 7, 2024
2 parents a5882fd + eeed236 commit ec3c4b8
Showing 1 changed file with 101 additions and 1 deletion.
102 changes: 101 additions & 1 deletion src/playground/main_chat.py
@@ -1,3 +1,4 @@
"""
from openai import OpenAI
import random
import common.info
@@ -8,6 +9,7 @@
PROMPT = common.info.getPrompt("playground_chat")
KEY = getKey('OPENAI')
MODEL = common.info.getModelName("Chat")
MODEL_4o = common.info.getModelName("Chat_4o")
NAME = "홍길동"
EMERGENCY = ["요금체납", "주거위기", "고용위기", "급여/서비스 탈락", "긴급상황 위기", "건강위기", "에너지위기"]
END_SIGNAL = "종료하겠습니다"
@@ -54,4 +56,102 @@
sheetname = names[name] # + ("_Good" if gb == 1 else "_Bad")
applyChat("Chat model", sheetname, conversation_history)
applyChat("Chat model(Backup)", sheetname, conversation_history)
# makeAssistFile(PROMPT, conversation_history)
# makeAssistFile(PROMPT, conversation_history)
"""
# TEMP CODE FOR EXPERIMENT
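# Runs the same interactive chat loop against three model configurations
# (MODEL_mini, MODEL_4o, and the hard-coded MODEL_base), each with its own
# conversation history and the same randomly generated opening message, so the
# three models' replies can be compared side by side.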
from openai import OpenAI
import random
import common.info
from common.auth_ import getKey
from saveFile import makeAssistFile, applyChat
from client import send_request_with_history
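# Note: makeAssistFile and applyChat are not used in this temporary experiment
# code; the imports remain from the original script kept in the docstring above.
# send_request_with_history (client.py) is assumed to append the user turn to
# the given history, call the specified model, and return its reply.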

PROMPT = common.info.getPrompt("playground_chat")
KEY = getKey('OPENAI')
MODEL_mini = common.info.getModelName("Chat")
MODEL_4o = common.info.getModelName("Chat_4o")
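# Baseline model name is hard-coded here rather than looked up via common.info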
MODEL_base = 'gpt-4o-mini'
NAME = "홍길동"
EMERGENCY = ["요금체납", "주거위기", "고용위기", "급여/서비스 탈락", "긴급상황 위기", "건강위기", "에너지위기"]
END_SIGNAL = "종료하겠습니다"

client = OpenAI(api_key=KEY)

# Randomly select crisis types (done only once)
numbers = [1, 2, 3, 3, 3, 3, 3, 3, 3, 4]
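# The repeated 3s weight the draw: most runs pick three crisis types.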
EMERGENCY_COUNT = random.sample(numbers, 1)[0]
EMERGENCY_LIST = random.sample(EMERGENCY, EMERGENCY_COUNT)

# Build the initial user input
first_ment = f"{NAME}: " + ", ".join(EMERGENCY_LIST)
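# e.g. "홍길동: 요금체납, 주거위기" (exact content depends on the random draw above)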

# Conversation with mini
conversation_history_model = [
    {
        "role": "system",
        "content": PROMPT
    }
]

print("아무것도 입력하지 않고 전송 시 종료됩니다.\n")

print("=== mini와의 대화 ===")
print(f"You: {first_ment}")

response_model = send_request_with_history(client, conversation_history_model, first_ment, MODEL_mini)
print("mini:", response_model)

while True:
    user_input = input("You: ")
    if user_input == '':
        break
    response_model = send_request_with_history(client, conversation_history_model, user_input, MODEL_mini)
    print("mini:", response_model)
    if END_SIGNAL in response_model:
        break

# Conversation with 4o
conversation_history_model_4o = [
    {
        "role": "system",
        "content": PROMPT
    }
]

print("\n=== 4o와의 대화 ===")
print(f"You: {first_ment}")

response_model_4o = send_request_with_history(client, conversation_history_model_4o, first_ment, MODEL_4o)
print("4o:", response_model_4o)

while True:
    user_input = input("You: ")
    if user_input == '':
        break
    response_model_4o = send_request_with_history(client, conversation_history_model_4o, user_input, MODEL_4o)
    print("4o:", response_model_4o)
    if END_SIGNAL in response_model_4o:
        break

# Conversation with base
conversation_history_model_base = [
    {
        "role": "system",
        "content": PROMPT
    }
]

print("\n=== base와의 대화 ===")
print(f"You: {first_ment}")

response_model_base = send_request_with_history(client, conversation_history_model_base, first_ment, MODEL_base)
print("base:", response_model_base)

while True:
    user_input = input("You: ")
    if user_input == '':
        break
    response_model_base = send_request_with_history(client, conversation_history_model_base, user_input, MODEL_base)
    print("base:", response_model_base)
    if END_SIGNAL in response_model_base:
        break
