Mirror of https://github.com/HIllya51/LunaTranslator.git, synced 2024-12-29 16:44:13 +08:00
commit 3a457ebc38
parent c25aa58c6b

fix
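Judging from the hunks below, this commit does three things: it threads a new donttrans flag from the text-hook layer up through MAINUI.textgetmethod / textgetmethod_1 so the Ctrl/Shift/Enter "skip translation" check is decided where the text originates; it adds a SafeFormatter helper plus _gptlike_createquery / _gptlike_createsys methods on basetrans and switches the GPT-like translator engines to them, removing the copy-pasted user-prompt and system-prompt construction; and it bumps VERSION_PATCH from 2 to 3.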
@@ -257,13 +257,25 @@ class MAINUI:
         return

     def textgetmethod(
-        self, text, is_auto_run=True, waitforresultcallback=None, onlytrans=False
+        self,
+        text,
+        is_auto_run=True,
+        waitforresultcallback=None,
+        onlytrans=False,
+        donttrans=False,
     ):
         with self.solvegottextlock:
-            self.textgetmethod_1(text, is_auto_run, waitforresultcallback, onlytrans)
+            self.textgetmethod_1(
+                text, is_auto_run, waitforresultcallback, onlytrans, donttrans
+            )

     def textgetmethod_1(
-        self, text, is_auto_run=True, waitforresultcallback=None, onlytrans=False
+        self,
+        text,
+        is_auto_run=True,
+        waitforresultcallback=None,
+        onlytrans=False,
+        donttrans=False,
     ):
         safe_callback = waitforresultcallback if waitforresultcallback else lambda _: 1
         safe_callback_none = functools.partial(safe_callback, "")
@@ -309,11 +321,7 @@ class MAINUI:
                 )
             except:
                 pass
-        if (
-            (windows.GetKeyState(windows.VK_CONTROL) < 0)
-            or (windows.GetKeyState(windows.VK_SHIFT) < 0)
-            or (windows.GetKeyState(windows.VK_RETURN) < 0)
-        ):
+        if donttrans:
             return safe_callback_none()
         if onlytrans == False:
             self.currenttext = text
@@ -5,6 +5,7 @@ import socket, gobject, uuid, subprocess, functools
 import ctypes, importlib, json
 import ctypes.wintypes
 from qtsymbols import *
+from string import Formatter
 from ctypes import CDLL, c_void_p, CFUNCTYPE, c_size_t, cast, c_char, POINTER
 from ctypes.wintypes import HANDLE
 from traceback import print_exc
@@ -860,3 +861,20 @@ def copytree(src, dst, copy_function=shutil.copy2):
             copy_function(srcname, dstname)
         except:
             pass
+
+
+class SafeFormatter(Formatter):
+    def format(self, format_string, must_exists=None, *args, **kwargs):
+        if must_exists:
+            check = "{" + must_exists + "}"
+            if check not in format_string:
+                format_string += check
+
+        return super().format(format_string, *args, **kwargs)
+
+    def get_value(self, key, args, kwargs):
+        if key in kwargs:
+            return super().get_value(key, args, kwargs)
+        else:
+            print(f"{key} is missing")
+            return key
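A quick illustration of what the new SafeFormatter does (hypothetical calls, not taken from the repo): must_exists appends the named placeholder when a user template forgot it, and unknown placeholders degrade to their key name instead of raising KeyError.

    fmt = SafeFormatter()

    # The template lacks {sentence}, so must_exists="sentence" appends it first.
    fmt.format("Translate politely: ", must_exists="sentence", sentence="こんにちは")
    # -> "Translate politely: こんにちは"

    # {tone} is not supplied: get_value prints "tone is missing" and substitutes the key name.
    fmt.format("{tone} translation of {sentence}", sentence="hi")
    # -> "tone translation of hi"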
@@ -662,7 +662,13 @@ class texthook(basetext):

     def dispatchtext(self, text):
         self.runonce_line = text
-        return super().dispatchtext(text)
+        donttrans = (
+            (windows.GetKeyState(windows.VK_CONTROL) < 0)
+            or (windows.GetKeyState(windows.VK_SHIFT) < 0)
+            or (windows.GetKeyState(windows.VK_RETURN) < 0)
+        )
+        return super().dispatchtext(text, donttrans=donttrans)

     def gettextonce(self):
         return self.runonce_line
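Note on the hunk above: GetKeyState(...) < 0 means the key is currently held down, so the Ctrl/Shift/Enter check is now evaluated at the hook layer when a line arrives and passed along as donttrans, rather than being re-checked inside MAINUI; textgetmethod_1 then returns early through safe_callback_none, so any waiting callback still receives an empty string.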
@@ -46,13 +46,10 @@ class basetext:
             print_exc()
         threading.Thread(target=self.sqlitethread).start()

-    def dispatchtext(self, text):
+    def dispatchtext(self, *arg, **kwarg):
         if self.ending or not self.isautorunning:
             return
-        if isinstance(text, tuple):
-            self.textgetmethod(*text)
-        else:
-            self.textgetmethod(text)
+        self.textgetmethod(*arg, **kwarg)

     def waitfortranslation(self, text):
         resultwaitor = queue.Queue()
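Because basetext.dispatchtext now just forwards *arg / **kwarg to self.textgetmethod, subclasses such as texthook can pass extra keyword arguments like donttrans=... without the base class having to know about them, which is presumably why the old tuple-unpacking special case could be dropped.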
@@ -31,13 +31,8 @@ class TS(basetrans):

     def createdata(self, message):
         temperature = self.config["Temperature"]
+        system = self._gptlike_createsys("use_user_prompt", "user_prompt")

-        if self.config["use_user_prompt"]:
-            system = self.config["user_prompt"]
-        else:
-            system = "You are a translator. Please help me translate the following {} text into {}, and you should only tell me the translation.".format(
-                self.srclang, self.tgtlang
-            )
         data = dict(
             system=system,
             model=self.config["model"],
@@ -106,18 +101,9 @@ class TS(basetrans):
     def translate(self, query):
         acss = self.checkchange()
         self.contextnum = int(self.config["context_num"])
-        user_prompt = (
-            self.config.get("user_user_prompt", "")
-            if self.config.get("use_user_user_prompt", False)
-            else ""
-        )
-        try:
-            if "{sentence}" in user_prompt:
-                query = user_prompt.format(sentence=query)
-            else:
-                query = user_prompt + query
-        except:
-            pass
+        query = self._gptlike_createquery(
+            query, "use_user_user_prompt", "user_user_prompt"
+        )
         message = []
         for _i in range(min(len(self.context) // 2, self.contextnum)):
             i = (
@@ -6,7 +6,7 @@ import zhconv, gobject
 import sqlite3, json
 import functools
 from myutils.config import globalconfig, translatorsetting
-from myutils.utils import stringfyerror, autosql, PriorityQueue
+from myutils.utils import stringfyerror, autosql, PriorityQueue, SafeFormatter
 from myutils.commonbase import ArgsEmptyExc, commonbase

@@ -266,6 +266,22 @@ class basetrans(commonbase):

         return res

+    def _gptlike_createquery(self, query, usekey, tempk):
+        user_prompt = (
+            self.config.get(tempk, "") if self.config.get(usekey, False) else ""
+        )
+        fmt = SafeFormatter()
+        return fmt.format(user_prompt, must_exists="sentence", sentence=query)
+
+    def _gptlike_createsys(self, usekey, tempk):
+
+        fmt = SafeFormatter()
+        if self.config[usekey]:
+            template = self.config[tempk]
+        else:
+            template = "You are a translator. Please help me translate the following {srclang} text into {tgtlang}, and you should only tell me the translation."
+        return fmt.format(template, srclang=self.srclang, tgtlang=self.tgtlang)
+
     def reinitandtrans(self, contentsolved, is_auto_run):
         if self.needreinit or self.initok == False:
             self.needreinit = False
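For reference, this is the calling pattern the later hunks switch the engines to (a minimal sketch assembled from those hunks, not a verbatim excerpt; the config key names differ per engine, e.g. "使用自定义promt"/"自定义promt" vs. "use_custom_prompt"/"custom_prompt"):

    # Inside an engine's translate(); keys shown are examples from the hunks below.
    query = self._gptlike_createquery(query, "use_user_user_prompt", "user_user_prompt")
    sysprompt = self._gptlike_createsys("使用自定义promt", "自定义promt")
    message = [
        {"role": "system", "content": sysprompt},
        {"role": "user", "content": query},
    ]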
@@ -1,6 +1,7 @@
 from traceback import print_exc
 import json
 from translator.basetranslator import basetrans
+from myutils.utils import SafeFormatter


 class TS(basetrans):
@@ -44,26 +45,12 @@ class TS(basetrans):
     def translate(self, query):
         self.checkempty(["API_KEY", "model"])
         self.contextnum = int(self.config["附带上下文个数"])
-        user_prompt = (
-            self.config.get("user_user_prompt", "")
-            if self.config.get("use_user_user_prompt", False)
-            else ""
-        )
-        try:
-            if "{sentence}" in user_prompt:
-                query = user_prompt.format(sentence=query)
-            else:
-                query = user_prompt + query
-        except:
-            pass
+        query = self._gptlike_createquery(
+            query, "use_user_user_prompt", "user_user_prompt"
+        )
+        system = self._gptlike_createsys("使用自定义promt", "自定义promt")
         temperature = self.config["Temperature"]

-        if self.config["使用自定义promt"]:
-            system = self.config["自定义promt"]
-        else:
-            system = "You are a translator, translate from {} to {}".format(
-                self.srclang, self.tgtlang
-            )
         message = []
         for _i in range(min(len(self.context) // 2, self.contextnum)):
             i = (
@@ -77,31 +77,13 @@ class TS(basetrans):
     def translate(self, query):
         self.checkempty(["SECRET_KEY", "model"])
         self.contextnum = int(self.config["附带上下文个数"])
-        user_prompt = (
-            self.config.get("user_user_prompt", "")
-            if self.config.get("use_user_user_prompt", False)
-            else ""
-        )
-        try:
-            if "{sentence}" in user_prompt:
-                query = user_prompt.format(sentence=query)
-            else:
-                query = user_prompt + query
-        except:
-            pass
-        temperature = self.config["Temperature"]

-        if self.config["使用自定义promt"]:
-            message = [{"role": "system", "content": self.config["自定义promt"]}]
-        else:
-            message = [
-                {
-                    "role": "system",
-                    "message": "You are a translator. Please help me translate the following {} text into {}, and you should only tell me the translation.".format(
-                        self.srclang, self.tgtlang
-                    ),
-                },
-            ]
+        query = self._gptlike_createquery(
+            query, "use_user_user_prompt", "user_user_prompt"
+        )
+        sysprompt = self._gptlike_createsys("使用自定义promt", "自定义promt")
+        message = [{"role": "system", "message": sysprompt}]
+        temperature = self.config["Temperature"]

         message.append(
             {
@@ -44,13 +44,7 @@ class commonllmdev(basetransdev):

     def translate(self, content):
         self.injectjs()
-
-        if self.config["use_custom_prompt"]:
-            prompt = self.config["custom_prompt"]
-        else:
-            prompt = "You are a translator. Please help me translate the following {} text into {}, and you should only tell me the translation.\n".format(
-                self.srclang, self.tgtlang
-            )
+        prompt = self._gptlike_createsys("use_custom_prompt", "custom_prompt")
         content = prompt + content
         self.Runtime_evaluate(
             f"document.querySelector(`{repr(self.textarea_selector)}`).foucs()"
@@ -40,18 +40,9 @@ class TS(basetrans):
             }
         }
         model = self.config["model"]
-        user_prompt = (
-            self.config.get("user_user_prompt", "")
-            if self.config.get("use_user_user_prompt", False)
-            else ""
-        )
-        try:
-            if "{sentence}" in user_prompt:
-                query = user_prompt.format(sentence=query)
-            else:
-                query = user_prompt + query
-        except:
-            pass
+        query = self._gptlike_createquery(
+            query, "use_user_user_prompt", "user_user_prompt"
+        )
         safety = {
             "safety_settings": [
                 {
@@ -72,21 +63,8 @@ class TS(basetrans):
                 },
             ]
         }
-
-        if self.config["use_custom_prompt"]:
-            sys_message = {
-                "systemInstruction": {"parts": {"text": self.config["custom_prompt"]}}
-            }
-        else:
-            sys_message = {
-                "systemInstruction": {
-                    "parts": {
-                        "text": "You are a translator. Please help me translate the following {} text into {}, and you should only tell me the translation.".format(
-                            self.srclang, self.tgtlang
-                        ),
-                    },
-                },
-            }
+        sysprompt = self._gptlike_createsys("use_custom_prompt", "custom_prompt")
+        sys_message = {"systemInstruction": {"parts": {"text": sysprompt}}}
         message = []
         for _i in range(min(len(self.context) // 2, self.contextnum)):
             i = (
@@ -1,6 +1,7 @@
 from translator.basetranslator import basetrans
 import json, requests
 from traceback import print_exc
+from myutils.utils import SafeFormatter


 class gptcommon(basetrans):
@@ -119,29 +120,11 @@ class gptcommon(basetrans):

     def translate(self, query):
         self.contextnum = int(self.config["附带上下文个数"])
-        user_prompt = (
-            self.config.get("user_user_prompt", "")
-            if self.config.get("use_user_user_prompt", False)
-            else ""
-        )
-        try:
-            if "{sentence}" in user_prompt:
-                query = user_prompt.format(sentence=query)
-            else:
-                query = user_prompt + query
-        except:
-            pass
-        if self.config["使用自定义promt"]:
-            message = [{"role": "system", "content": self.config["自定义promt"]}]
-        else:
-            message = [
-                {
-                    "role": "system",
-                    "content": "You are a translator. Please help me translate the following {} text into {}, and you should only tell me the translation.".format(
-                        self.srclang, self.tgtlang
-                    ),
-                },
-            ]
+        query = self._gptlike_createquery(
+            query, "use_user_user_prompt", "user_user_prompt"
+        )
+        sysprompt = self._gptlike_createsys("使用自定义promt", "自定义promt")
+        message = [{"role": "system", "content": sysprompt}]

         for _i in range(min(len(self.context) // 2, self.contextnum)):
             i = (
@@ -152,7 +135,6 @@ class gptcommon(basetrans):
             message.append(self.context[i * 2])
             message.append(self.context[i * 2 + 1])
         message.append({"role": "user", "content": query})
-
         usingstream = self.config["流式输出"]
         response = self.proxysession.post(
             self.createurl(),
@@ -115,30 +115,11 @@ class TS(basetrans):
     def translate(self, query):
         self.checkempty(["secret_id", "secret_key"])
         self.contextnum = int(self.config["context_num"])
-        user_prompt = (
-            self.config.get("user_user_prompt", "")
-            if self.config.get("use_user_user_prompt", False)
-            else ""
-        )
-        try:
-            if "{sentence}" in user_prompt:
-                query = user_prompt.format(sentence=query)
-            else:
-                query = user_prompt + query
-        except:
-            pass
-        print(query)
-        if self.config["use_user_prompt"]:
-            message = [{"Role": "system", "Content": self.config["user_prompt"]}]
-        else:
-            message = [
-                {
-                    "Role": "system",
-                    "Content": "You are a translator. Please help me translate the following {} text into {}, and you should only tell me the translation.".format(
-                        self.srclang, self.tgtlang
-                    ),
-                },
-            ]
+        query = self._gptlike_createquery(
+            query, "use_user_user_prompt", "user_user_prompt"
+        )
+        sysprompt = self._gptlike_createsys("use_user_prompt", "user_prompt")
+        message = [{"Role": "system", "Content": sysprompt}]

         for _i in range(min(len(self.context) // 2, self.contextnum)):
             i = (
@@ -29,7 +29,7 @@ include(generate_product_version)

 set(VERSION_MAJOR 5)
 set(VERSION_MINOR 33)
-set(VERSION_PATCH 2)
+set(VERSION_PATCH 3)

 add_library(pch pch.cpp)
 target_precompile_headers(pch PUBLIC pch.h)