Spaces:
Sleeping
Sleeping
JHK-maeshay
committed on
Commit
·
3e4fdb2
1
Parent(s):
ccfdf30
Revert "major fix"
Browse files
This reverts commit 10ccadfd90a6b75bcb13241fde0a0fd321c5933b.
- core/make_reply.py +7 -5
core/make_reply.py
CHANGED
|
@@ -1,9 +1,9 @@
|
|
| 1 |
import re
|
| 2 |
|
| 3 |
# 생성된 모든 봇 응답 기록
|
| 4 |
-
def generate_reply(ctx, makePipeLine):
|
| 5 |
# 최초 응답
|
| 6 |
-
response = generate_valid_response(ctx, makePipeLine)
|
| 7 |
ctx.addHistory("bot", response)
|
| 8 |
|
| 9 |
# 불안정한 응답이 생성되므로 사용하지 않음
|
|
@@ -15,12 +15,12 @@ def generate_reply(ctx, makePipeLine):
|
|
| 15 |
'''
|
| 16 |
|
| 17 |
# 봇 응답 1회 생성
|
| 18 |
-
def generate_valid_response(ctx, makePipeline) -> str:
|
| 19 |
user_name = ctx.getUserName()
|
| 20 |
bot_name = ctx.getBotName()
|
| 21 |
|
| 22 |
while True:
|
| 23 |
-
prompt = build_prompt(ctx.getHistory(), user_name, bot_name)
|
| 24 |
print("\n==========[DEBUG: Prompt]==========")
|
| 25 |
print(prompt)
|
| 26 |
print("===================================\n")
|
|
@@ -31,7 +31,7 @@ def generate_valid_response(ctx, makePipeline) -> str:
|
|
| 31 |
return clean_response(response, bot_name)
|
| 32 |
|
| 33 |
# 입력 프롬프트 정리
|
| 34 |
-
def build_prompt(history, user_name, bot_name):
|
| 35 |
with open("assets/prompt/init.txt", "r", encoding="utf-8") as f:
|
| 36 |
system_prompt = f.read().strip()
|
| 37 |
|
|
@@ -41,6 +41,8 @@ def build_prompt(history, user_name, bot_name):
|
|
| 41 |
role = user_name if turn["role"] == "user" else bot_name
|
| 42 |
dialogue += f"{role}: {turn['text']}\n"
|
| 43 |
|
|
|
|
|
|
|
| 44 |
# 모델에 맞는 포맷 구성
|
| 45 |
prompt = f"""### Instruction:
|
| 46 |
{system_prompt}
|
|
|
|
| 1 |
import re
|
| 2 |
|
| 3 |
# 생성된 모든 봇 응답 기록
|
| 4 |
+
def generate_reply(ctx, makePipeLine, user_msg):
|
| 5 |
# 최초 응답
|
| 6 |
+
response = generate_valid_response(ctx, makePipeLine, user_msg)
|
| 7 |
ctx.addHistory("bot", response)
|
| 8 |
|
| 9 |
# 불안정한 응답이 생성되므로 사용하지 않음
|
|
|
|
| 15 |
'''
|
| 16 |
|
| 17 |
# 봇 응답 1회 생성
|
| 18 |
+
def generate_valid_response(ctx, makePipeline, user_msg) -> str:
|
| 19 |
user_name = ctx.getUserName()
|
| 20 |
bot_name = ctx.getBotName()
|
| 21 |
|
| 22 |
while True:
|
| 23 |
+
prompt = build_prompt(ctx.getHistory(), user_msg, user_name, bot_name)
|
| 24 |
print("\n==========[DEBUG: Prompt]==========")
|
| 25 |
print(prompt)
|
| 26 |
print("===================================\n")
|
|
|
|
| 31 |
return clean_response(response, bot_name)
|
| 32 |
|
| 33 |
# 입력 프롬프트 정리
|
| 34 |
+
def build_prompt(history, user_msg, user_name, bot_name):
|
| 35 |
with open("assets/prompt/init.txt", "r", encoding="utf-8") as f:
|
| 36 |
system_prompt = f.read().strip()
|
| 37 |
|
|
|
|
| 41 |
role = user_name if turn["role"] == "user" else bot_name
|
| 42 |
dialogue += f"{role}: {turn['text']}\n"
|
| 43 |
|
| 44 |
+
dialogue += f"{user_name}: {user_msg}\n"
|
| 45 |
+
|
| 46 |
# 모델에 맞는 포맷 구성
|
| 47 |
prompt = f"""### Instruction:
|
| 48 |
{system_prompt}
|