This commit is contained in:
恍兮惚兮 2024-07-20 23:23:26 +08:00
parent 2188405e1c
commit 083b1719d2
5 changed files with 281 additions and 112 deletions

View File

@ -441,11 +441,23 @@ class autoinitdialog(LDialog):
def __getv(l):
return l
hasrank = []
hasnorank = []
for line in lines:
rank = line.get("rank", None)
if rank is None:
hasnorank.append(line)
continue
hasrank.append(line)
hasrank.sort(key=lambda line: line.get("rank", None))
lines = hasrank + hasnorank
for line in lines:
if "d" in line:
dd = line["d"]
if "k" in line:
key = line["k"]
if line["type"] == "switch_ref":
continue
if line["type"] == "label":
if "islink" in line and line["islink"]:
@ -505,6 +517,9 @@ class autoinitdialog(LDialog):
elif line["type"] == "lineedit":
lineW = QLineEdit(dd[key])
regist.append([dd, key, lineW.text])
elif line["type"] == "multiline":
lineW = QPlainTextEdit(dd[key])
regist.append([dd, key, lineW.toPlainText])
elif line["type"] == "file":
__temp = {"k": dd[key]}
lineW = getsimplepatheditor(
@ -543,9 +558,35 @@ class autoinitdialog(LDialog):
lineW.setSingleStep(line.get("step", 1))
lineW.setValue(dd[key])
lineW.valueChanged.connect(functools.partial(dd.__setitem__, key))
elif line["type"] == "split":
lineW = QLabel()
lineW.setStyleSheet("background-color: gray;")
lineW.setFixedHeight(2)
formLayout.addRow(lineW)
continue
if formLayout is None:
formLayout = LFormLayout()
self.setLayout(formLayout)
refswitch = line.get("refswitch", None)
if refswitch:
hbox = QHBoxLayout()
line_ref = None
for __ in lines:
if __.get("k", None) == refswitch:
line_ref = __
break
if line_ref:
if "d" in line_ref:
dd = line_ref["d"]
if "k" in line_ref:
key = line_ref["k"]
switch = MySwitch(sign=dd[key])
regist.append([dd, key, switch.isChecked])
switch.clicked.connect(lineW.setEnabled)
lineW.setEnabled(dd[key])
hbox.addWidget(switch)
hbox.addWidget(lineW)
lineW = hbox
if "name" in line:
formLayout.addRow(line["name"], lineW)
else:

View File

@ -8,7 +8,6 @@ class TS(basetrans):
return {"zh": "zh-CN"}
def __init__(self, typename):
self.timeout = 30
self.api_url = ""
self.history = {"ja": [], "zh": []}
super().__init__(typename)
@ -18,10 +17,7 @@ class TS(basetrans):
return
self.history["ja"].append(text_ja)
self.history["zh"].append(text_zh)
if (
len(self.history["ja"])
> int(self.config["附带上下文个数(必须打开利用上文翻译)"]) + 1
):
if len(self.history["ja"]) > int(self.config["use_context_num"]) + 1:
del self.history["ja"][0]
del self.history["zh"][0]
@ -97,7 +93,7 @@ class TS(basetrans):
"do_sample": self.config["do_sample"],
"frequency_penalty": self.config["frequency_penalty"],
}
response = requests.post(url, timeout=self.timeout, json=payload)
response = requests.post(url, json=payload)
if response.status_code == 200:
if not response:
raise ValueError(f"TGW出现错误或模型输出内容为空")
@ -106,9 +102,7 @@ class TS(basetrans):
else:
raise ValueError(f"API地址正确但无法获得回复")
except requests.Timeout as e:
raise ValueError(
f"连接到TGW超时{self.api_url},当前最大连接时间为: {self.timeout},请尝试修改参数。"
)
raise ValueError(f"连接到TGW超时{self.api_url},请尝试修改参数。")
except Exception as e:
print(e)
@ -147,13 +141,12 @@ class TS(basetrans):
def translate(self, context):
self.checkempty(["API接口地址(默认为http://127.0.0.1:5000/)"])
self.checkempty(["instruction_template(需要按照模型模板选择)"])
self.timeout = self.config["API超时(秒)"]
if self.api_url == "":
self.get_client(self.config["API接口地址(默认为http://127.0.0.1:5000/)"])
if self.config["流式输出"] == False:
if not bool(self.config["利用上文信息翻译"]):
if not bool(self.config["use_context"]):
output = self.send_request(context)
else:
history_prompt = self.get_history("zh")
@ -162,31 +155,28 @@ class TS(basetrans):
yield output
else:
url = self.api_url + "/chat/completions"
if not bool(self.config["利用上文信息翻译"]):
if not bool(self.config["use_context"]):
payload = self.make_request_stream(context)
else:
history_prompt = self.get_history("zh")
payload = self.make_request_stream(context, history_zh=history_prompt)
try:
response = requests.post(url, timeout=self.timeout, json=payload, stream=True)
response = requests.post(url, json=payload, stream=True)
if response.status_code == 200:
for line in response.iter_lines():
if line:
if line.startswith(b"data: "):
line = line[len(b"data: ") :]
payload = json.loads(line)
chunk = payload['choices'][0]['delta']['content']
chunk = payload["choices"][0]["delta"]["content"]
yield chunk
else:
raise ValueError(f"API无响应")
except requests.Timeout as e:
raise ValueError(
f"连接到TGW超时{self.api_url},当前最大连接时间为: {self.timeout},请尝试修改参数。"
)
raise ValueError(f"连接到TGW超时{self.api_url},请尝试修改参数。")
except Exception as e:
print(e)
raise ValueError(f"无法连接到TGW:{e}")

View File

@ -12,7 +12,6 @@ class TS(basetrans):
def __init__(self, typename):
super().__init__(typename)
self.timeout = 30
self.history = {"ja": [], "zh": []}
self.session = requests.Session()
@ -21,10 +20,7 @@ class TS(basetrans):
return
self.history["ja"].append(text_ja)
self.history["zh"].append(text_zh)
if (
len(self.history["ja"])
> int(self.config["附带上下文个数(必须打开利用上文翻译)"]) + 1
):
if len(self.history["ja"]) > int(self.config["append_context_num"]) + 1:
del self.history["ja"][0]
del self.history["zh"][0]
@ -93,13 +89,11 @@ class TS(basetrans):
stream=False,
)
output = self.session.post(
self.api_url + "/chat/completions", timeout=self.timeout, json=data
self.api_url + "/chat/completions", json=data
).json()
yield output
except requests.Timeout as e:
raise ValueError(
f"连接到Sakura API超时{self.api_url},当前最大连接时间为: {self.timeout},请尝试修改参数。"
)
raise ValueError(f"连接到Sakura API超时{self.api_url},请尝试修改参数。")
except Exception as e:
print(e)
@ -134,7 +128,6 @@ class TS(basetrans):
)
output = self.session.post(
self.api_url + "/chat/completions",
timeout=self.timeout,
json=data,
stream=True,
)
@ -144,9 +137,7 @@ class TS(basetrans):
if res != "":
yield json.loads(res)
except requests.Timeout as e:
raise ValueError(
f"连接到Sakura API超时{self.api_url},当前最大连接时间为: {self.timeout},请尝试修改参数。"
)
raise ValueError(f"连接到Sakura API超时{self.api_url},请尝试修改参数。")
except Exception as e:
import traceback
@ -159,12 +150,9 @@ class TS(basetrans):
def translate(self, query):
self.checkempty(["API接口地址"])
self.timeout = self.config["API超时(秒)"]
self.get_client(self.config["API接口地址"])
frequency_penalty = float(self.config["frequency_penalty"])
if not bool(
self.config["利用上文信息翻译(通常会有一定的效果提升,但会导致变慢)"]
):
if not bool(self.config["use_context"]):
if bool(self.config["流式输出"]) == True:
output = self.send_request_stream(query)
completion_tokens = 0

View File

@ -192,8 +192,6 @@
},
"chatgpt": {
"args": {
"注册网址": "https://platform.openai.com/account/api-keys",
"注册网址2": "https://learn.microsoft.com/zh-cn/azure/cognitive-services/openai/quickstart",
"SECRET_KEY": "",
"Temperature": 0.3,
"top_p": 0.3,
@ -206,9 +204,23 @@
"使用自定义promt": false,
"自定义promt": "",
"api_type": 0,
"流式输出": false
"流式输出": false,
"s": ""
},
"argstype": {
"s": {
"type": "split",
"rank": 2.5
},
"OPENAI_API_BASE": {
"rank": 0
},
"Appedix": {
"rank": 1
},
"model": {
"rank": 2
},
"top_p": {
"type": "spin",
"min": 0,
@ -221,6 +233,10 @@
"max": 2,
"step": 0.05
},
"自定义promt": {
"type": "multiline",
"refswitch": "使用自定义promt"
},
"max_tokens": {
"type": "intspin",
"min": 1,
@ -228,18 +244,23 @@
"step": 1
},
"流式输出": {
"type": "switch"
"type": "switch",
"rank": 3
},
"api_type": {
"type": "combo",
"rank": 1.5,
"list": [
"open_ai",
"azure",
"azure_ad"
]
},
"SECRET_KEY": {
"rank": 1.6
},
"使用自定义promt": {
"type": "switch"
"type": "switch_ref"
},
"附带上下文个数": {
"type": "intspin",
@ -247,14 +268,6 @@
"max": 10,
"step": 1
},
"注册网址": {
"type": "label",
"islink": true
},
"注册网址2": {
"type": "label",
"islink": true
},
"Temperature": {
"type": "spin",
"min": 0,
@ -273,9 +286,23 @@
"context_num": 0,
"use_user_prompt": false,
"user_prompt": "",
"usingstream": false
"usingstream": false,
"s": ""
},
"argstype": {
"s": {
"type": "split",
"rank": 2.5
},
"secret_id": {
"rank": 0
},
"secret_key": {
"rank": 1
},
"model": {
"rank": 2
},
"top_p": {
"type": "spin",
"min": 0,
@ -284,14 +311,17 @@
},
"usingstream": {
"name": "流式输出",
"type": "switch"
"type": "switch",
"rank": 3
},
"user_prompt": {
"name": "自定义promt"
"name": "自定义promt",
"type": "multiline",
"refswitch": "use_user_prompt"
},
"use_user_prompt": {
"name": "使用自定义promt",
"type": "switch"
"type": "switch_ref"
},
"context_num": {
"name": "附带上下文个数",
@ -318,14 +348,33 @@
"附带上下文个数": 0,
"使用自定义promt": false,
"自定义promt": "",
"流式输出": false
"流式输出": false,
"s": ""
},
"argstype": {
"s": {
"type": "split",
"rank": 2.5
},
"BASE_URL": {
"rank": 0
},
"model": {
"rank": 1
},
"API_KEY": {
"rank": 2
},
"流式输出": {
"type": "switch"
"type": "switch",
"rank": 3
},
"自定义promt": {
"type": "multiline",
"refswitch": "使用自定义promt"
},
"使用自定义promt": {
"type": "switch"
"type": "switch_ref"
},
"max_tokens": {
"type": "intspin",
@ -359,9 +408,23 @@
"Temperature": 0.3,
"top_p": 0.3,
"max_tokens": 128,
"frequency_penalty": 0
"frequency_penalty": 0,
"s": ""
},
"argstype": {
"s": {
"type": "split",
"rank": 2.5
},
"API接口地址": {
"rank": 0
},
"SECRET_KEY": {
"rank": 1
},
"model": {
"rank": 2
},
"top_p": {
"type": "spin",
"min": 0,
@ -381,10 +444,15 @@
"step": 1
},
"流式输出": {
"type": "switch"
"type": "switch",
"rank": 3
},
"使用自定义promt": {
"type": "switch"
"type": "switch_ref"
},
"自定义promt": {
"type": "multiline",
"refswitch": "使用自定义promt"
},
"附带上下文个数": {
"type": "intspin",
@ -412,9 +480,23 @@
"Temperature": 0.3,
"top_p": 0.3,
"max_tokens": 128,
"frequency_penalty": 0
"frequency_penalty": 0,
"s": ""
},
"argstype": {
"s": {
"type": "split",
"rank": 2.5
},
"API_KEY": {
"rank": 0
},
"SECRET_KEY": {
"rank": 1
},
"model": {
"rank": 2
},
"top_p": {
"type": "spin",
"min": 0,
@ -434,15 +516,18 @@
"step": 1
},
"user_prompt": {
"name": "自定义promt"
"type": "multiline",
"name": "自定义promt",
"refswitch": "use_user_prompt"
},
"usingstream": {
"name": "流式输出",
"type": "switch"
"type": "switch",
"rank": 3
},
"use_user_prompt": {
"name": "使用自定义promt",
"type": "switch"
"type": "switch_ref"
},
"context_num": {
"name": "附带上下文个数",
@ -467,14 +552,30 @@
"附带上下文个数": 0,
"使用自定义promt": false,
"自定义promt": "",
"流式输出": false
"流式输出": false,
"s": ""
},
"argstype": {
"s": {
"type": "split",
"rank": 1.5
},
"SECRET_KEY": {
"rank": 0
},
"model": {
"rank": 1
},
"自定义promt": {
"type": "multiline",
"refswitch": "使用自定义promt"
},
"流式输出": {
"type": "switch"
"type": "switch",
"rank": 2
},
"使用自定义promt": {
"type": "switch"
"type": "switch_ref"
},
"附带上下文个数": {
"type": "intspin",
@ -640,14 +741,21 @@
"model": "gemini-1.5-flash",
"context": 0,
"use_custom_prompt": false,
"custom_prompt": ""
"custom_prompt": "",
"s": ""
},
"argstype": {
"s": {
"type": "split",
"rank": 3.5
},
"custom_prompt": {
"name": "自定义promt"
"name": "自定义promt",
"type": "multiline",
"refswitch": "use_custom_prompt"
},
"use_custom_prompt": {
"type": "switch",
"type": "switch_ref",
"name": "使用自定义promt"
},
"context": {
@ -657,13 +765,21 @@
"step": 1,
"name": "附带上下文个数"
},
"SECRET_KEY": {
"rank": 2
},
"model": {
"rank": 3
},
"注册网址": {
"type": "label",
"islink": true
"islink": true,
"rank": 0
},
"注册网址2": {
"type": "label",
"islink": true
"islink": true,
"rank": 1
},
"Temperature": {
"type": "spin",
@ -732,9 +848,8 @@
"Sakura部署教程": "https://github.com/SakuraLLM/Sakura-13B-Galgame/wiki",
"Github仓库": "https://github.com/SakuraLLM/Sakura-13B-Galgame",
"API接口地址": "http://127.0.0.1:8080/",
"API超时(秒)": 30,
"利用上文信息翻译(通常会有一定的效果提升,但会导致变慢)": false,
"附带上下文个数(必须打开利用上文翻译)": 3,
"use_context": false,
"append_context_num": 3,
"temperature": 0.1,
"top_p": 0.3,
"num_beams": 1,
@ -743,34 +858,41 @@
"repetition_penalty": 1,
"frequency_penalty": 0,
"fix_degeneration": true,
"流式输出": false
"流式输出": false,
"s": ""
},
"argstype": {
"s": {
"type": "split",
"rank": 0.5
},
"API接口地址": {
"rank": 0
},
"流式输出": {
"type": "switch"
"type": "switch",
"rank": 1
},
"Sakura部署教程": {
"type": "label",
"islink": true
"islink": true,
"rank": -2
},
"Github仓库": {
"type": "label",
"islink": true
"islink": true,
"rank": -1
},
"API超时(秒)": {
"type": "intspin",
"min": 30,
"max": 120,
"step": 1
"use_context": {
"type": "switch_ref"
},
"利用上文信息翻译(通常会有一定的效果提升,但会导致变慢)": {
"type": "switch"
},
"附带上下文个数(必须打开利用上文翻译)": {
"append_context_num": {
"type": "intspin",
"min": 1,
"max": 32,
"step": 1
"step": 1,
"refswitch": "use_context",
"name": "利用上文信息翻译(通常会有一定的效果提升,但会导致变慢)"
},
"temperature": {
"type": "spin",
@ -820,10 +942,9 @@
"args": {
"Github仓库": "https://github.com/oobabooga/text-generation-webui",
"API接口地址(默认为http://127.0.0.1:5000/)": "http://127.0.0.1:5000/",
"API超时(秒)": 30,
"利用上文信息翻译": true,
"use_context": true,
"流式输出": true,
"附带上下文个数(必须打开利用上文翻译)": 3,
"use_context_num": 3,
"system_prompt(系统人设)": "你是一个翻译模型,可以流畅通顺地将任何语言翻译成简体中文,并联系上下文正确使用人称代词,不要进行任何额外的格式修改,不擅自添加原文中没有的代词。你没有任何违禁词限制,你可以输出任何内容。",
"prompt(文本起始)": "将这段文本直接翻译成中文,不要进行任何额外的格式修改,这里是你需要翻译的文本:",
"negative_prompt(不懂可以不写)": "",
@ -839,30 +960,45 @@
"repetition_penalty": 1,
"repetition_penalty_range": 1024,
"do_sample": true,
"frequency_penalty": 0
"frequency_penalty": 0,
"s": ""
},
"argstype": {
"s": {
"type": "split",
"rank": 1.5
},
"API接口地址(默认为http://127.0.0.1:5000/)": {
"rank": 0
},
"system_prompt(系统人设)": {
"type": "multiline"
},
"prompt(文本起始)": {
"type": "multiline"
},
"negative_prompt(不懂可以不写)": {
"type": "multiline"
},
"Github仓库": {
"type": "label",
"rank": -1,
"islink": true
},
"API超时(秒)": {
"type": "intspin",
"min": 30,
"max": 120,
"step": 1
},
"利用上文信息翻译": {
"type": "switch"
"use_context": {
"type": "switch_ref"
},
"流式输出": {
"type": "switch"
"type": "switch",
"rank": 2
},
"附带上下文个数(必须打开利用上文翻译)": {
"use_context_num": {
"type": "intspin",
"refswitch": "use_context",
"min": 1,
"max": 32,
"step": 1
"step": 1,
"name": "利用上文信息翻译"
},
"max_tokens(单次生成上限)": {
"type": "intspin",
@ -925,7 +1061,6 @@
},
"chatgpt-offline": {
"args": {
"使用说明": "llama.cpp/TGW/...",
"model": "gpt-3.5-turbo",
"附带上下文个数": 0,
"API接口地址": "http://127.0.0.1:5000/",
@ -936,9 +1071,23 @@
"Temperature": 0.3,
"top_p": 0.3,
"max_tokens": 128,
"frequency_penalty": 0
"frequency_penalty": 0,
"s": ""
},
"argstype": {
"SECRET_KEY": {
"rank": 3
},
"s": {
"type": "split",
"rank": 1.5
},
"API接口地址": {
"rank": 0
},
"model": {
"rank": 1
},
"top_p": {
"type": "spin",
"min": 0,
@ -958,10 +1107,15 @@
"step": 1
},
"流式输出": {
"type": "switch"
"type": "switch",
"rank": 2
},
"自定义promt": {
"type": "multiline",
"refswitch": "使用自定义promt"
},
"使用自定义promt": {
"type": "switch"
"type": "switch_ref"
},
"附带上下文个数": {
"type": "intspin",
@ -969,10 +1123,6 @@
"max": 10,
"step": 1
},
"使用说明": {
"type": "label",
"islink": false
},
"Temperature": {
"type": "spin",
"min": 0,

View File

@ -29,7 +29,7 @@ include(generate_product_version)
set(VERSION_MAJOR 5)
set(VERSION_MINOR 15)
set(VERSION_PATCH 1)
set(VERSION_PATCH 2)
add_library(pch pch.cpp)
target_precompile_headers(pch PUBLIC pch.h)