listmodels

恍兮惚兮 2024-09-05 12:47:58 +08:00
parent 67570fc974
commit d93a258d3f
31 changed files with 439 additions and 439 deletions

View File

@ -521,13 +521,13 @@ def getalistname(parent, callback, skipid=False, skipidid=None, title="添加到
if len(__uid) > 1:
autoinitdialog(
parent,
__d,
title,
600,
[
{
"type": "combo",
"name": "目标列表",
"d": __d,
"k": "k",
"list": __vis,
},

View File

@ -268,6 +268,7 @@ def maybehavebutton(self, gameuid, post):
callback = functools.partial(
autoinitdialog,
self,
save_text_process_info["postprocessconfig"][post]['args'],
postprocessconfig[post]["name"],
600,
items,
@ -999,13 +1000,13 @@ class dialog_setting_game_internal(QWidget):
__d = {"k": 0}
autoinitdialog(
self,
__d,
("预处理方法"),
400,
[
{
"type": "combo",
"name": ("预处理方法"),
"d": __d,
"k": "k",
"list": __viss,
},

View File

@ -156,13 +156,13 @@ def getselectpos(parent, callback):
if len(__uid) > 1:
autoinitdialog(
parent,
__d,
"位置",
600,
[
{
"type": "combo",
"name": "位置",
"d": __d,
"k": "k",
"list": __vis,
},

View File

@ -14,11 +14,11 @@ from gui.usefulwidget import (
TableViewW,
getsimplepatheditor,
FocusSpin,
FocusDoubleSpin,
LFocusCombo,
getsimplecombobox,
getspinbox,
SplitLine,
getIconButton,
)
from gui.dynalang import (
LFormLayout,
@ -27,7 +27,6 @@ from gui.dynalang import (
LStandardItemModel,
LDialog,
LDialog,
LAction,
)
@ -408,7 +407,7 @@ class yuyinzhidingsetting(LDialog):
def autoinitdialog_items(dic):
items = []
for arg in dic["args"]:
default = dict(name=arg, d=dic["args"], k=arg, type="lineedit")
default = dict(name=arg, k=arg, type="lineedit")
if "argstype" in dic and arg in dic["argstype"]:
default.update(dic["argstype"][arg])
@ -418,10 +417,11 @@ def autoinitdialog_items(dic):
@Singleton_close
class autoinitdialog(LDialog):
def __init__(self, parent, title, width, lines, _=None) -> None:
class autoinitdialog__(LDialog):
def __init__(
self, parent, dd, title, width, lines, modelfile=None, maybehasextrainfo=None
) -> None:
super().__init__(parent, Qt.WindowType.WindowCloseButtonHint)
self.setWindowTitle(title)
self.resize(QSize(width, 10))
for line in lines:
@ -439,11 +439,11 @@ class autoinitdialog(LDialog):
return
formLayout = LFormLayout()
self.setLayout(formLayout)
regist = []
regist = {}
def save(callback=None):
for l in regist:
l[0][l[1]] = l[2]()
for k in regist:
dd[k] = regist[k]()
self.close()
if callback:
try:
@ -470,6 +470,10 @@ class autoinitdialog(LDialog):
refswitch = line.get("refswitch", None)
if refswitch:
refname2line[refswitch] = None
list_cache = line.get("list_cache", None)
if list_cache:
refname2line[list_cache] = None
oklines = []
for line in lines:
@ -480,8 +484,6 @@ class autoinitdialog(LDialog):
oklines.append(line)
lines = oklines
for line in lines:
if "d" in line:
dd = line["d"]
if "k" in line:
key = line["k"]
if line["type"] == "label":
@ -492,12 +494,9 @@ class autoinitdialog(LDialog):
else:
lineW = LLabel(dd[key])
elif line["type"] == "textlist":
__list = dd[key]
e = listediterline(line["name"], line["header"], __list)
regist.append([dd, key, functools.partial(__getv, __list)])
lineW = QHBoxLayout()
lineW.addWidget(e)
__list = dd[key].copy()
lineW = listediterline(line["name"], line["header"], __list)
regist[key] = functools.partial(__getv, __list)
elif line["type"] == "combo":
lineW = LFocusCombo()
if "list_function" in line:
@ -513,9 +512,48 @@ class autoinitdialog(LDialog):
items = line["list"]
lineW.addItems(items)
lineW.setCurrentIndex(dd.get(key, 0))
lineW.currentIndexChanged.connect(
functools.partial(dd.__setitem__, key)
)
regist[key] = lineW.currentIndex
elif line["type"] == "lineedit_or_combo":
line1 = QLineEdit()
lineW = QHBoxLayout()
combo = LFocusCombo()
combo.setLineEdit(line1)
def __refresh(regist, line, combo: LFocusCombo):
try:
func = getattr(
importlib.import_module(modelfile), line["list_function"]
)
items = func(maybehasextrainfo, regist)
curr = combo.currentText()
combo.clear()
combo.addItems(items)
if curr in items:
combo.setCurrentIndex(items.index(curr))
dd[refname2line[line["list_cache"]]["k"]] = items
except Exception as e:
print_exc()
QMessageBox.information(self, str(type(e))[8:-2], str(e))
if "list_function" in line:
items = dd[refname2line[line["list_cache"]]["k"]]
else:
items = line["list"]
combo.addItems(items)
if dd[key] in items:
combo.setCurrentIndex(items.index(dd[key]))
else:
combo.setCurrentText(dd[key])
regist[key] = combo.currentText
if "list_function" in line:
lineW.addWidget(
getIconButton(
callback=functools.partial(__refresh, regist, line, combo),
icon="fa.refresh",
)
)
lineW.addWidget(combo)
elif line["type"] == "okcancel":
lineW = QDialogButtonBox(
QDialogButtonBox.StandardButton.Ok
@ -532,10 +570,10 @@ class autoinitdialog(LDialog):
)
elif line["type"] == "lineedit":
lineW = QLineEdit(dd[key])
regist.append([dd, key, lineW.text])
regist[key] = lineW.text
elif line["type"] == "multiline":
lineW = QPlainTextEdit(dd[key])
regist.append([dd, key, lineW.toPlainText])
regist[key] = lineW.toPlainText
elif line["type"] == "file":
__temp = {"k": dd[key]}
lineW = getsimplepatheditor(
@ -550,25 +588,28 @@ class autoinitdialog(LDialog):
dirorfile=line.get("dirorfile", False),
)
regist.append([dd, key, functools.partial(__temp.__getitem__, "k")])
regist[key] = functools.partial(__temp.__getitem__, "k")
elif line["type"] == "switch":
lineW = MySwitch(sign=dd[key])
regist.append([dd, key, lineW.isChecked])
regist[key] = lineW.isChecked
_ = QHBoxLayout()
_.addStretch()
_.addWidget(lineW)
_.addStretch()
lineW = _
elif line["type"] in ["spin", "intspin"]:
__temp = {"k": dd[key]}
lineW = getspinbox(
line.get("min", 0),
line.get("max", 100),
dd,
key,
__temp,
"k",
line["type"] == "spin",
line.get("step", 0.1),
)
regist[key] = lineW.value
elif line["type"] == "split":
lineW = SplitLine()
formLayout.addRow(lineW)
@ -578,12 +619,10 @@ class autoinitdialog(LDialog):
hbox = QHBoxLayout()
line_ref = refname2line.get(refswitch, None)
if line_ref:
if "d" in line_ref:
dd = line_ref["d"]
if "k" in line_ref:
key = line_ref["k"]
switch = MySwitch(sign=dd[key])
regist.append([dd, key, switch.isChecked])
regist[key] = switch.isChecked
switch.clicked.connect(lineW.setEnabled)
lineW.setEnabled(dd[key])
hbox.addWidget(switch)
@ -596,18 +635,28 @@ class autoinitdialog(LDialog):
self.show()
def autoinitdialogx(
parent, dd, title, width, lines, modelfile, maybehasextrainfo, _=None
):
autoinitdialog__(parent, dd, title, width, lines, modelfile, maybehasextrainfo)
def autoinitdialog(parent, dd, title, width, lines, _=None):
autoinitdialog__(parent, dd, title, width, lines)
def getsomepath1(
parent, title, d, k, label, callback=None, isdir=False, filter1="*.db"
):
autoinitdialog(
parent,
d,
title,
800,
[
{
"type": "file",
"name": label,
"d": d,
"k": k,
"dir": isdir,
"filter": filter1,

View File

@ -37,6 +37,7 @@ def gethiragrid(self):
callback=functools.partial(
autoinitdialog,
self,
globalconfig["hirasetting"][name]['args'],
globalconfig["hirasetting"][name]["name"],
800,
items,
@ -139,6 +140,7 @@ def initinternal(self, names):
callback=functools.partial(
autoinitdialog,
self,
globalconfig["cishu"][cishu]['args'],
globalconfig["cishu"][cishu]["name"],
800,
items,

View File

@ -188,13 +188,13 @@ def installqwebdialog(self, link):
base = link.split("/")[-1]
autoinitdialog(
self,
dd,
"安装_QWebEngine",
800,
[
{
"type": "file",
"name": "路径",
"d": dd,
"k": "k",
"dir": False,
"filter": base,

View File

@ -2,7 +2,7 @@ from qtsymbols import *
import functools, os
from myutils.config import globalconfig, ocrsetting, ocrerrorfix, static_data
from myutils.utils import splitocrtypes, dynamiclink
from gui.inputdialog import autoinitdialog, postconfigdialog, autoinitdialog_items
from gui.inputdialog import autoinitdialogx, postconfigdialog, autoinitdialog_items
from gui.usefulwidget import (
D_getsimplecombobox,
D_getspinbox,
@ -139,7 +139,14 @@ def initgridsources(self, names):
items = autoinitdialog_items(ocrsetting[name])
_3 = D_getIconButton(
callback=functools.partial(
autoinitdialog, self, globalconfig["ocr"][name]["name"], 800, items
autoinitdialogx,
self,
ocrsetting[name]["args"],
globalconfig["ocr"][name]["name"],
800,
items,
"ocrengines." + name,
name,
),
icon="fa.gear",
)

View File

@ -9,7 +9,7 @@ from myutils.utils import (
translate_exits,
)
from gui.pretransfile import sqlite2json
from gui.inputdialog import autoinitdialog, autoinitdialog_items
from gui.inputdialog import autoinitdialog, autoinitdialog_items, autoinitdialogx
from gui.usefulwidget import (
D_getspinbox,
getIconButton,
@ -95,7 +95,6 @@ def getalistname(parent, copy, btnplus, callback):
{
"type": "combo",
"name": "复制自" if not copy else "删除",
"d": __d,
"k": "k",
"list": __vis,
}
@ -105,7 +104,6 @@ def getalistname(parent, copy, btnplus, callback):
{
"name": "命名为",
"type": "lineedit",
"d": __d,
"k": "n",
}
)
@ -118,6 +116,7 @@ def getalistname(parent, copy, btnplus, callback):
)
autoinitdialog(
parent,
__d,
("删除" if copy else "复制") + "接口",
600,
__,
@ -148,13 +147,17 @@ def selectllmcallback(self, countnum, btnplus, fanyi, name):
layout: QGridLayout = getattr(self, "damoxinggridinternal" + btnplus)
items = autoinitdialog_items(translatorsetting[uid])
last = getIconButton(
callback=functools.partial(
autoinitdialog,
autoinitdialogx,
self,
translatorsetting[uid]['args'],
(globalconfig["fanyi"][uid]["name"]),
800,
items,
"userconfig.copyed."+uid,
uid,
),
icon="fa.gear",
)
@ -305,21 +308,29 @@ def initsome11(self, l, label=None, btnplus=False):
line = []
countnum = []
for fanyi in l:
if not translate_exits(fanyi):
which=translate_exits(fanyi,which=True)
if which is None:
continue
i += 1
countnum.append(fanyi)
if fanyi in translatorsetting:
items = autoinitdialog_items(translatorsetting[fanyi])
if which==0:
aclass = "translator." + fanyi
elif which == 1:
aclass = "userconfig.copyed." + fanyi
last = D_getIconButton(
callback=functools.partial(
autoinitdialog,
autoinitdialogx,
self,
(globalconfig["fanyi"][fanyi]["name"]),
translatorsetting[fanyi]['args'],
globalconfig["fanyi"][fanyi]["name"],
800,
items,
aclass,
fanyi,
),
icon="fa.gear",
)
@ -494,7 +505,6 @@ def setTabTwo_lazy(self, basel):
"dir": False,
"filter": "*.exe",
"name": "Chromium_路径",
"d": globalconfig,
"k": "chromepath",
},
{"type": "okcancel"},
@ -513,6 +523,7 @@ def setTabTwo_lazy(self, basel):
callback=functools.partial(
autoinitdialog,
self,
globalconfig,
"Chromium_路径",
800,
_items,
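
In initsome11 above, translate_exits is now asked which kind of engine it found, and the result picks the module whose list_models the gear-button dialog (autoinitdialogx) should import. The exact return convention is my reading of this hunk: 0 for a bundled translator module, 1 for a user-copied configuration. A hypothetical helper making that routing explicit:

    # Illustrative only; the `which` values and module layout are inferred from the hunk above.
    def settings_module_for(fanyi: str, which: int) -> str:
        """Module passed to autoinitdialogx as `modelfile`, later import_module()'d by the refresh button."""
        if which == 0:
            return "translator." + fanyi          # engine shipped in translator/
        if which == 1:
            return "userconfig.copyed." + fanyi   # user-duplicated engine configuration
        raise ValueError(f"unsupported which={which}")

    print(settings_module_for("gptcommon", 0))    # translator.gptcommon
    print(settings_module_for("mycopy", 1))       # userconfig.copyed.mycopy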

View File

@ -145,6 +145,7 @@ def setTab7_lazy(self, basel):
callback = functools.partial(
autoinitdialog,
self,
postprocessconfig[post]['args'],
postprocessconfig[post]["name"],
600,
items,

View File

@ -86,6 +86,7 @@ def getttsgrid(self, names):
callback=functools.partial(
autoinitdialog,
self,
globalconfig["reader"][name]['args'],
globalconfig["reader"][name]["name"],
800,
items,

View File

@ -878,3 +878,42 @@ class SafeFormatter(Formatter):
else:
print(f"{key} is missing")
return key
def checkv1(api_url: str):
# 傻逼豆包大模型是非要v3不是v1
if api_url.endswith("/v3"):
return api_url
elif api_url.endswith("/v3/"):
return api_url[:-1]
# 智谱AI
elif api_url.endswith("/v4"):
return api_url
elif api_url.endswith("/v4/"):
return api_url[:-1]
# 正常的
elif api_url.endswith("/v1"):
return api_url
elif api_url.endswith("/v1/"):
return api_url[:-1]
elif api_url.endswith("/"):
return api_url + "v1"
else:
return api_url + "/v1"
def createurl(url: str):
if url.endswith("/chat/completions"):
pass
else:
url = checkv1(url) + "/chat/completions"
return url
def createenglishlangmap():
return dict(
zip(
static_data["language_list_translator_inner"],
static_data["language_list_translator_inner_english"],
)
)
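
The URL helpers added here centralise what gptcommon and the ChatGPT-like OCR engine each implemented before: the in-code comments mark Doubao (豆包) as requiring /v3 and Zhipu AI (智谱) as requiring /v4, while everything else is normalised to /v1; createenglishlangmap pairs the language codes with the English-name list added further down in static_data.json. Expected behaviour of the URL helpers, assuming the module is importable as myutils.utils (sample URLs are illustrative):

    from myutils.utils import checkv1, createurl

    assert checkv1("https://api.openai.com") == "https://api.openai.com/v1"
    assert checkv1("https://api.openai.com/v1/") == "https://api.openai.com/v1"
    assert checkv1("https://ark.cn-beijing.volces.com/api/v3") == \
        "https://ark.cn-beijing.volces.com/api/v3"        # Doubao keeps /v3
    assert checkv1("https://open.bigmodel.cn/api/paas/v4") == \
        "https://open.bigmodel.cn/api/paas/v4"            # Zhipu AI keeps /v4

    # createurl leaves a complete endpoint alone and completes everything else
    assert createurl("https://api.deepseek.com/v1/chat/completions") == \
        "https://api.deepseek.com/v1/chat/completions"
    assert createurl("https://api.openai.com") == "https://api.openai.com/v1/chat/completions"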

View File

@ -1,27 +1,26 @@
from ocrengines.baseocrclass import baseocr
import base64
import base64, requests
from myutils.utils import createurl, createenglishlangmap
from myutils.proxy import getproxy
def list_models(typename, regist):
js = requests.get(
createurl(regist["apiurl"]())[: -len("/chat/completions")] + "/models",
headers={"Authorization": "Bearer " + regist["SECRET_KEY"]().split("|")[0]},
proxies=getproxy(("ocr", typename)),
).json()
try:
return [_["id"] for _ in js["data"]]
except:
raise Exception(js)
class OCR(baseocr):
def langmap(self):
return {
"zh": "Simplified Chinese",
"ja": "Japanese",
"en": "English",
"ru": "Russian",
"es": "Spanish",
"ko": "Korean",
"fr": "French",
"cht": "Traditional Chinese",
"vi": "Vietnamese",
"tr": "Turkish",
"pl": "Polish",
"uk": "Ukrainian",
"it": "Italian",
"ar": "Arabic",
"th": "Thai",
}
return createenglishlangmap()
def createdata(self, message):
temperature = self.config["Temperature"]
@ -42,27 +41,6 @@ class OCR(baseocr):
def createheaders(self):
return {"Authorization": "Bearer " + self.config["SECRET_KEY"]}
def checkv1(self, api_url: str):
# 傻逼豆包大模型是非要v3不是v1
if api_url.endswith("/v3"):
return api_url
elif api_url.endswith("/v3/"):
return api_url[:-1]
# 智谱AI
elif api_url.endswith("/v4"):
return api_url
elif api_url.endswith("/v4/"):
return api_url[:-1]
# 正常的
elif api_url.endswith("/v1"):
return api_url
elif api_url.endswith("/v1/"):
return api_url[:-1]
elif api_url.endswith("/"):
return api_url + "v1"
else:
return api_url + "/v1"
def ocr(self, imagebinary):
if self.config["use_custom_prompt"]:
@ -103,9 +81,4 @@ class OCR(baseocr):
raise Exception(response.text)
def createurl(self):
url = self.config["apiurl"]
if url.endswith("/chat/completions"):
pass
else:
url = self.checkv1(url) + "/chat/completions"
return url
return createurl(self.config["apiurl"])
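
list_models here receives the dialog's regist mapping, whose values are zero-argument getters for the current (possibly unsaved) field contents, so the /models request is built from whatever the user has just typed; only the first entry of a "|"-separated key is sent. A sketch of the request it constructs, with fake getters standing in for the Qt widgets:

    from myutils.utils import createurl

    regist = {
        "apiurl": lambda: "https://api.openai.com",      # stand-in for the line edit's text
        "SECRET_KEY": lambda: "sk-first|sk-second",      # "|"-separated keys; first one is used
    }

    models_url = createurl(regist["apiurl"]())[: -len("/chat/completions")] + "/models"
    auth = "Bearer " + regist["SECRET_KEY"]().split("|")[0]

    print(models_url)   # https://api.openai.com/v1/models
    print(auth)         # Bearer sk-first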

View File

@ -1,27 +1,35 @@
import base64
import requests
from ocrengines.baseocrclass import baseocr
from myutils.utils import createenglishlangmap
from myutils.proxy import getproxy
def list_models(typename, regist):
js = requests.get(
"https://generativelanguage.googleapis.com/v1beta/models",
params={"key": regist["key"]().split("|")[0]},
proxies=getproxy(("ocr", typename)),
).json()
try:
models = js["models"]
except:
raise Exception(js)
mm = []
for m in models:
name: str = m["name"]
supportedGenerationMethods: list = m["supportedGenerationMethods"]
if "generateContent" not in supportedGenerationMethods:
continue
if name.startswith("models/"):
name = name[7:]
mm.append(name)
return mm
class OCR(baseocr):
def langmap(self):
return {
"zh": "Simplified Chinese",
"ja": "Japanese",
"en": "English",
"ru": "Russian",
"es": "Spanish",
"ko": "Korean",
"fr": "French",
"cht": "Traditional Chinese",
"vi": "Vietnamese",
"tr": "Turkish",
"pl": "Polish",
"uk": "Ukrainian",
"it": "Italian",
"ar": "Arabic",
"th": "Thai",
}
return createenglishlangmap()
def ocr(self, imagebinary):
self.checkempty(["key"])

View File

@ -117,7 +117,6 @@ class OCR(baseocr):
"X-TC-Region": region,
},
data=payload,
timeout=10,
)
try:

View File

@ -265,6 +265,8 @@ class Requester_common:
headers["Content-Type"] = contenttype
proxy = proxies.get(scheme, None) if proxies else None
proxy = None if proxy == "" else proxy
if timeout is None:
timeout = 10
if timeout:
if isinstance(timeout, (float, int)):
timeout = int(timeout * 1000) # convert to milliseconds
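
With this change a request made without an explicit timeout now defaults to 10 seconds, which is why the hard-coded timeout=10 in the Tencent OCR hunk above could be dropped. A behaviour sketch (the helper name is mine):

    def _normalize_timeout(timeout):
        if timeout is None:
            timeout = 10                      # default applied when the caller passes no timeout
        if timeout:
            if isinstance(timeout, (float, int)):
                timeout = int(timeout * 1000) # the native requester works in milliseconds
        return timeout

    assert _normalize_timeout(None) == 10000
    assert _normalize_timeout(2.5) == 2500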

View File

@ -1,15 +0,0 @@
from translator.gptcommon import gptcommon
class TS(gptcommon):
def createurl(self):
return f'https://{self.config["endpoint"]}/openai/deployments/{self.config["deployment-id"]}/completions?api-version={self.config["api-version"]}'
def createheaders(self):
_ = super().createheaders()
_.update({"api-key": self.multiapikeycurrent["api-key"]})
return _
def translate(self, query):
self.checkempty(["api-key", "api-version", "endpoint", "deployment-id"])
return super().translate(query)

View File

@ -1,28 +1,12 @@
from translator.basetranslator import basetrans
import json, requests
from traceback import print_exc
from myutils.utils import createenglishlangmap
class TS(basetrans):
def langmap(self):
return {
"zh": "Simplified Chinese",
"ja": "Japanese",
"en": "English",
"ru": "Russian",
"es": "Spanish",
"ko": "Korean",
"fr": "French",
"cht": "Traditional Chinese",
"vi": "Vietnamese",
"tr": "Turkish",
"pl": "Polish",
"uk": "Ukrainian",
"it": "Italian",
"ar": "Arabic",
"th": "Thai",
}
return createenglishlangmap()
def __init__(self, typename):
self.context = []

View File

@ -1,4 +1,4 @@
from translator.gptcommon import gptcommon
from translator.gptcommon import gptcommon, list_models
class TS(gptcommon):

View File

@ -1,28 +1,24 @@
from traceback import print_exc
import json
from translator.basetranslator import basetrans
from myutils.utils import SafeFormatter
from myutils.utils import createenglishlangmap
def checkv1(api_url):
if api_url[-4:] == "/v1/":
api_url = api_url[:-1]
elif api_url[-3:] == "/v1":
pass
elif api_url[-1] == "/":
api_url += "v1"
else:
api_url += "/v1"
return api_url
class TS(basetrans):
def langmap(self):
return {
"zh": "Simplified Chinese",
"ja": "Japanese",
"en": "English",
"ru": "Russian",
"es": "Spanish",
"ko": "Korean",
"fr": "French",
"cht": "Traditional Chinese",
"vi": "Vietnamese",
"tr": "Turkish",
"pl": "Polish",
"uk": "Ukrainian",
"it": "Italian",
"ar": "Arabic",
"th": "Thai",
}
return createenglishlangmap()
def __init__(self, typename):
self.context = []
@ -31,17 +27,6 @@ class TS(basetrans):
def inittranslator(self):
self.api_key = None
def checkv1(self, api_url):
if api_url[-4:] == "/v1/":
api_url = api_url[:-1]
elif api_url[-3:] == "/v1":
pass
elif api_url[-1] == "/":
api_url += "v1"
else:
api_url += "/v1"
return api_url
def translate(self, query):
self.checkempty(["API_KEY", "model"])
self.contextnum = int(self.config["附带上下文个数"])
@ -65,7 +50,6 @@ class TS(basetrans):
headers = {
"anthropic-version": "2023-06-01",
"accept": "application/json",
"anthropic-version": "2023-06-01",
"content-type": "application/json",
"X-Api-Key": self.multiapikeycurrent["API_KEY"],
}
@ -80,7 +64,7 @@ class TS(basetrans):
stream=usingstream,
)
response = self.proxysession.post(
self.checkv1(self.config["BASE_URL"]) + "/messages",
checkv1(self.config["BASE_URL"]) + "/messages",
headers=headers,
json=data,
stream=usingstream,

View File

@ -1,6 +1,9 @@
from traceback import print_exc
import json
import json, requests
from myutils.utils import createenglishlangmap
from translator.basetranslator import basetrans
from myutils.proxy import getproxy
"""
{'response_id': 'f6299ecb-b90a-4582-84e9-3c5c5c586919', 'text': 'In Chinese characters, "Monday" is written as: 星期一\n\nIs there anything else you would like me to translate for you?', 'generation_id': '998f2d14-1af7-4ec3-8699-b164c67a6900', 'chat_history': [{'role': 'USER', 'message': 'translate it to chinese'}, {'role': 'CHATBOT', 'message': 'ok'}, {'role': 'USER', 'message': 'today is monday'}, {'role': 'CHATBOT', 'message': 'In Chinese characters, "Monday" is written as: 星期一\n\nIs there anything else you would like me to translate for you?'}], 'finish_reason': 'COMPLETE', 'meta': {'api_version': {'version': '1'}, 'billed_units': {'input_tokens': 10, 'output_tokens': 29}, 'tokens': {'input_tokens': 82, 'output_tokens': 29}}}
@ -38,23 +41,7 @@ from translator.basetranslator import basetrans
class TS(basetrans):
def langmap(self):
return {
"zh": "Simplified Chinese",
"ja": "Japanese",
"en": "English",
"ru": "Russian",
"es": "Spanish",
"ko": "Korean",
"fr": "French",
"cht": "Traditional Chinese",
"vi": "Vietnamese",
"tr": "Turkish",
"pl": "Polish",
"uk": "Ukrainian",
"it": "Italian",
"ar": "Arabic",
"th": "Thai",
}
return createenglishlangmap()
def __init__(self, typename):
self.context = []
@ -63,17 +50,6 @@ class TS(basetrans):
def inittranslator(self):
self.api_key = None
def checkv1(self, api_url):
if api_url[-4:] == "/v1/":
api_url = api_url[:-1]
elif api_url[-3:] == "/v1":
pass
elif api_url[-1] == "/":
api_url += "v1"
else:
api_url += "/v1"
return api_url
def translate(self, query):
self.checkempty(["SECRET_KEY", "model"])
self.contextnum = int(self.config["附带上下文个数"])
@ -150,3 +126,25 @@ class TS(basetrans):
yield message
self.context.append({"role": "USER", "message": query})
self.context.append({"role": "CHATBOT", "message": message})
def list_models(typename, regist):
js = requests.get(
"https://api.cohere.com/v1/models",
headers={
"Authorization": "Bearer " + regist["SECRET_KEY"]().split("|")[0],
"X-Client-Name": "my-cool-project",
},
proxies=getproxy(("fanyi", typename)),
).json()
try:
models = js["models"]
except:
raise Exception(js)
mm = []
for m in models:
endpoints = m["endpoints"]
if "chat" not in endpoints:
continue
mm.append(m["name"])
return mm

View File

@ -1,6 +1,6 @@
from translator.basetranslator_dev import basetransdev
import time, os
from myutils.utils import createenglishlangmap
class commonllmdev(basetransdev):
jsfile = ...
@ -10,23 +10,7 @@ class commonllmdev(basetransdev):
function2 = ...
def langmap(self):
return {
"zh": "Simplified Chinese",
"ja": "Japanese",
"en": "English",
"ru": "Russian",
"es": "Spanish",
"ko": "Korean",
"fr": "French",
"cht": "Traditional Chinese",
"vi": "Vietnamese",
"tr": "Turkish",
"pl": "Polish",
"uk": "Ukrainian",
"it": "Italian",
"ar": "Arabic",
"th": "Thai",
}
return createenglishlangmap()
def injectjs(self):
with open(

View File

@ -1,26 +1,12 @@
from translator.basetranslator import basetrans
import json
from myutils.utils import createenglishlangmap
import json, requests
from myutils.proxy import getproxy
class TS(basetrans):
def langmap(self):
return {
"zh": "Simplified Chinese",
"ja": "Japanese",
"en": "English",
"ru": "Russian",
"es": "Spanish",
"ko": "Korean",
"fr": "French",
"cht": "Traditional Chinese",
"vi": "Vietnamese",
"tr": "Turkish",
"pl": "Polish",
"uk": "Ukrainian",
"it": "Italian",
"ar": "Arabic",
"th": "Thai",
}
return createenglishlangmap()
def __init__(self, typename):
self.context = []
@ -103,3 +89,25 @@ class TS(basetrans):
yield line
self.context.append({"role": "user", "parts": [{"text": query}]})
self.context.append({"role": "model", "parts": [{"text": line}]})
def list_models(typename, regist):
js = requests.get(
"https://generativelanguage.googleapis.com/v1beta/models",
params={"key": regist["SECRET_KEY"]().split("|")[0]},
proxies=getproxy(("fanyi", typename)),
).json()
try:
models = js["models"]
except:
raise Exception(js)
mm = []
for m in models:
name: str = m["name"]
supportedGenerationMethods: list = m["supportedGenerationMethods"]
if "generateContent" not in supportedGenerationMethods:
continue
if name.startswith("models/"):
name = name[7:]
mm.append(name)
return mm
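
Both the Gemini OCR engine and this translator now fetch https://generativelanguage.googleapis.com/v1beta/models and keep only chat-capable entries. A worked example on a trimmed, made-up response of that shape:

    sample = {
        "models": [
            {"name": "models/gemini-1.5-flash", "supportedGenerationMethods": ["generateContent", "countTokens"]},
            {"name": "models/embedding-001", "supportedGenerationMethods": ["embedContent"]},
        ]
    }
    mm = []
    for m in sample["models"]:
        if "generateContent" not in m["supportedGenerationMethods"]:
            continue                      # drop embedding-only models
        name = m["name"]
        if name.startswith("models/"):
            name = name[7:]               # strip the "models/" prefix before showing it in the combo
        mm.append(name)
    print(mm)                             # ['gemini-1.5-flash']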

View File

@ -1,29 +1,27 @@
from translator.basetranslator import basetrans
import json, requests
from traceback import print_exc
from myutils.utils import SafeFormatter
from myutils.utils import createurl, createenglishlangmap
from myutils.proxy import getproxy
def list_models(typename, regist):
js = requests.get(
createurl(regist["API接口地址"]())[: -len("/chat/completions")] + "/models",
headers={"Authorization": "Bearer " + regist["SECRET_KEY"]().split("|")[0]},
proxies=getproxy(("fanyi", typename)),
).json()
try:
return [_["id"] for _ in js["data"]]
except:
raise Exception(js)
class gptcommon(basetrans):
def langmap(self):
return {
"zh": "Simplified Chinese",
"ja": "Japanese",
"en": "English",
"ru": "Russian",
"es": "Spanish",
"ko": "Korean",
"fr": "French",
"cht": "Traditional Chinese",
"vi": "Vietnamese",
"tr": "Turkish",
"pl": "Polish",
"uk": "Ukrainian",
"it": "Italian",
"ar": "Arabic",
"th": "Thai",
}
return createenglishlangmap()
def __init__(self, typename):
self.context = []
@ -52,28 +50,10 @@ class gptcommon(basetrans):
return data
def createheaders(self):
return {"Authorization": "Bearer " + self.multiapikeycurrent["SECRET_KEY"]}
def checkv1(self, api_url: str):
# 傻逼豆包大模型是非要v3不是v1
if api_url.endswith("/v3"):
return api_url
elif api_url.endswith("/v3/"):
return api_url[:-1]
# 智谱AI
elif api_url.endswith("/v4"):
return api_url
elif api_url.endswith("/v4/"):
return api_url[:-1]
# 正常的
elif api_url.endswith("/v1"):
return api_url
elif api_url.endswith("/v1/"):
return api_url[:-1]
elif api_url.endswith("/"):
return api_url + "v1"
else:
return api_url + "/v1"
_ = {"Authorization": "Bearer " + self.multiapikeycurrent["SECRET_KEY"]}
if "openai.azure.com/openai/deployments/" in self.config["API接口地址"]:
_.update({"api-key": self.multiapikeycurrent["SECRET_KEY"]})
return _
def commonparseresponse(self, query, response: requests.ResponseBase, usingstream):
if usingstream:
@ -146,9 +126,6 @@ class gptcommon(basetrans):
return self.commonparseresponse(query, response, usingstream)
def createurl(self):
url = self.config["API接口地址"]
if url.endswith("/chat/completions"):
pass
else:
url = self.checkv1(url) + "/chat/completions"
return url
if "openai.azure.com/openai/deployments/" in self.config["API接口地址"]:
return self.config["API接口地址"]
return createurl(self.config["API接口地址"])
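
gptcommon now covers Azure OpenAI directly, which is why the dedicated azureopenai translator and its config block are deleted elsewhere in this commit: a URL containing openai.azure.com/openai/deployments/ is used verbatim (and an api-key header is added alongside the Bearer one), everything else goes through createurl. A sketch of that branch, with an illustrative Azure deployment URL:

    from myutils.utils import createurl

    def resolve_url(api_url: str) -> str:
        if "openai.azure.com/openai/deployments/" in api_url:
            return api_url                 # full deployment URL, already carries api-version etc.
        return createurl(api_url)

    print(resolve_url("https://api.openai.com"))
    # -> https://api.openai.com/v1/chat/completions
    print(resolve_url("https://res.openai.azure.com/openai/deployments/gpt-4o/chat/completions?api-version=2024-06-01"))
    # -> returned unchanged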

View File

@ -212,7 +212,11 @@ class TS(basetrans):
gpt_dict = query["gpt_dict"]
contentraw = query["contentraw"]
query = query["text"]
if (gpt_dict is not None) and len(gpt_dict):
if (
(gpt_dict is not None)
and len(gpt_dict)
and self.config["prompt_version"] == 1
):
query = contentraw
self.checkempty(["API接口地址"])
self.get_client(self.config["API接口地址"])

View File

@ -1,6 +1,6 @@
from translator.basetranslator import basetrans
import json
from myutils.utils import createenglishlangmap
from datetime import datetime
import hashlib, sys, hmac, time, json
@ -90,23 +90,7 @@ def _build_req_with_tc3_signature(key, _id, action, params, options=None):
class TS(basetrans):
def langmap(self):
return {
"zh": "Simplified Chinese",
"ja": "Japanese",
"en": "English",
"ru": "Russian",
"es": "Spanish",
"ko": "Korean",
"fr": "French",
"cht": "Traditional Chinese",
"vi": "Vietnamese",
"tr": "Turkish",
"pl": "Polish",
"uk": "Ukrainian",
"it": "Italian",
"ar": "Arabic",
"th": "Thai",
}
return createenglishlangmap()
def __init__(self, typename):
self.context = []

View File

@ -1931,13 +1931,6 @@
"color": "blue",
"name": "有道api"
},
"azureopenai": {
"type": "api",
"use": false,
"color": "blue",
"name": "Azure",
"is_gpt_like": true
},
"cohere": {
"type": "api",
"use": false,

View File

@ -269,17 +269,26 @@
"url": "https://generativelanguage.googleapis.com/v1",
"model": "gemini-1.5-flash",
"use_custom_prompt": false,
"modellistcache": [],
"custom_prompt": ""
},
"argstype": {
"custom_prompt": {
"name": "自定义promt",
"name": "自定义_promt",
"type": "multiline",
"refswitch": "use_custom_prompt"
},
"use_custom_prompt": {
"type": "switch_ref",
"name": "使用自定义promt"
},
"model": {
"type": "lineedit_or_combo",
"list_function": "list_models",
"list_cache": "modellistcache"
},
"modellistcache": {
"type": "list_cache"
}
}
},
@ -294,11 +303,13 @@
"top_p": 0.3,
"max_tokens": 128,
"frequency_penalty": 0,
"modellistcache": [],
"s": ""
},
"argstype": {
"SECRET_KEY": {
"rank": 0.5
"rank": 0.5,
"name": "API Key"
},
"s": {
"type": "split",
@ -309,7 +320,13 @@
"rank": 0
},
"model": {
"rank": 1
"rank": 1,
"type": "lineedit_or_combo",
"list_function": "list_models",
"list_cache": "modellistcache"
},
"modellistcache": {
"type": "list_cache"
},
"top_p": {
"type": "spin",
@ -330,7 +347,7 @@
"step": 1
},
"custom_prompt": {
"name": "自定义promt",
"name": "自定义_prompt",
"type": "multiline",
"refswitch": "use_custom_prompt"
},
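
This is the configuration side of the new model dropdown: a field declared as lineedit_or_combo stays freely editable, its refresh button calls the engine module's list_models via importlib, and the fetched list is written into the companion list_cache arg (added to the defaults as "modellistcache": [], and, judging by the refname2line handling in inputdialog.py, kept out of the visible form). The same declaration as an annotated Python literal:

    # Keys exactly as in the JSON above; comments describe how autoinitdialog__ interprets them.
    argstype = {
        "model": {
            "type": "lineedit_or_combo",     # editable combo: pick a fetched model or type any id
            "list_function": "list_models",  # name looked up on the engine module by the refresh button
            "list_cache": "modellistcache",  # which arg holds the cached model list
        },
        "modellistcache": {
            "type": "list_cache",            # backing storage for the fetched list
        },
    }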

View File

@ -116,6 +116,30 @@
"pt",
"hu"
],
"language_list_translator_inner_english": [
"Simplified Chinese",
"Japanese",
"Traditional Chinese",
"English",
"Russian",
"Spanish",
"Korean",
"French",
"Vietnamese",
"Turkish",
"Polish",
"Ukrainian",
"Italian",
"Arabic",
"Thai",
"Tibetan",
"German",
"Swedish",
"Dutch",
"Czech",
"Portuguese",
"Hungarian"
],
"font_type_default": {
"default": [
"Segoe UI",

View File

@ -209,100 +209,6 @@
}
}
},
"azureopenai": {
"args": {
"api-key": "",
"Temperature": 0.3,
"top_p": 0.3,
"max_tokens": 128,
"frequency_penalty": 0,
"model": "gpt-3.5-turbo",
"附带上下文个数": 0,
"endpoint": "https://your-resource-name.openai.azure.com",
"deployment-id": "",
"api-version": "2024-06-01",
"使用自定义promt": false,
"自定义promt": "",
"流式输出": true,
"user_user_prompt": "{sentence}",
"use_user_user_prompt": false,
"other_args": "{}",
"use_other_args": false,
"s": ""
},
"argstype": {
"other_args": {
"type": "multiline",
"refswitch": "use_other_args",
"name": "其他参数"
},
"user_user_prompt": {
"name": "自定义_user message",
"refswitch": "use_user_user_prompt",
"rank": 5.1
},
"s": {
"type": "split",
"rank": 2.5
},
"endpoint": {
"rank": 0
},
"deployment-id": {
"rank": 1
},
"api-version": {
"rank": 1.5
},
"model": {
"rank": 2
},
"top_p": {
"type": "spin",
"min": 0,
"max": 1,
"step": 0.01
},
"frequency_penalty": {
"type": "spin",
"min": 0,
"max": 2,
"step": 0.05
},
"自定义promt": {
"name": "自定义_system prompt",
"type": "multiline",
"refswitch": "使用自定义promt",
"rank": 5
},
"max_tokens": {
"type": "intspin",
"min": 1,
"max": 4096,
"step": 1
},
"流式输出": {
"type": "switch",
"rank": 3
},
"api-key": {
"rank": 1.6
},
"附带上下文个数": {
"type": "intspin",
"min": 0,
"max": 10,
"step": 1,
"rank": 4.9
},
"Temperature": {
"type": "spin",
"min": 0,
"max": 1,
"step": 0.1
}
}
},
"txhunyuan": {
"args": {
"secret_id": "",
@ -339,7 +245,18 @@
"name": "SecretKey"
},
"model": {
"rank": 2
"rank": 2,
"type": "lineedit_or_combo",
"list": [
"hunyuan-lite",
"hunyuan-turbo",
"hunyuan-pro",
"hunyuan-standard",
"hunyuan-standard-256k",
"hunyuan-role",
"hunyuan-functioncall",
"hunyuan-code"
]
},
"top_p": {
"type": "spin",
@ -382,7 +299,7 @@
"claude": {
"args": {
"BASE_URL": "https://api.anthropic.com",
"model": "claude-3-opus-20240229",
"model": "claude-3-5-sonnet-20240620",
"API_KEY": "",
"max_tokens": 2048,
"Temperature": 0.3,
@ -415,7 +332,14 @@
"rank": 0
},
"model": {
"rank": 1
"rank": 1,
"type": "lineedit_or_combo",
"list": [
"claude-3-5-sonnet-20240620",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307"
]
},
"API_KEY": {
"rank": 0.5
@ -454,6 +378,7 @@
"chatgpt-3rd-party": {
"args": {
"model": "gpt-3.5-turbo",
"modellistcache": [],
"附带上下文个数": 0,
"API接口地址": "https://api.openai.com",
"SECRET_KEY": "",
@ -489,10 +414,17 @@
"rank": 0
},
"SECRET_KEY": {
"rank": 1
"rank": 1,
"name": "API Key"
},
"model": {
"rank": 2
"rank": 2,
"type": "lineedit_or_combo",
"list_function": "list_models",
"list_cache": "modellistcache"
},
"modellistcache": {
"type": "list_cache"
},
"top_p": {
"type": "spin",
@ -539,7 +471,7 @@
},
"baiduqianfan": {
"args": {
"model": "ernie-3.5-8k-0329",
"model": "ernie-4.0-8k",
"context_num": 0,
"API_KEY": "",
"SECRET_KEY": "",
@ -575,10 +507,23 @@
"rank": 0
},
"SECRET_KEY": {
"rank": 1
"rank": 1,
"name": "API Key"
},
"model": {
"rank": 2
"rank": 2,
"type": "lineedit_or_combo",
"list": [
"ernie-4.0-8k",
"ernie-4.0-turbo-8k",
"ernie-3.5-128k",
"ernie-3.5-8k",
"ernie-speed-pro-128k",
"ernie-speed-128k",
"ernie-speed-8k",
"ernie-lite-8k",
"ernie-tiny-8k"
]
},
"top_p": {
"type": "spin",
@ -629,6 +574,7 @@
"args": {
"SECRET_KEY": "",
"Temperature": 0.3,
"modellistcache": [],
"model": "command-r",
"附带上下文个数": 0,
"使用自定义promt": false,
@ -656,10 +602,17 @@
"rank": 1.5
},
"SECRET_KEY": {
"rank": 0
"rank": 0,
"name": "API Key"
},
"model": {
"rank": 1
"rank": 1,
"type": "lineedit_or_combo",
"list_function": "list_models",
"list_cache": "modellistcache"
},
"modellistcache": {
"type": "list_cache"
},
"自定义promt": {
"type": "multiline",
@ -1000,6 +953,7 @@
"SECRET_KEY": "",
"Temperature": 0.3,
"model": "gemini-1.5-flash",
"modellistcache": [],
"context": 0,
"use_custom_prompt": false,
"custom_prompt": "",
@ -1045,10 +999,17 @@
"rank": 4.9
},
"SECRET_KEY": {
"rank": 2
"rank": 2,
"name": "API Key"
},
"model": {
"rank": 3
"rank": 3,
"type": "lineedit_or_combo",
"list_function": "list_models",
"list_cache": "modellistcache"
},
"modellistcache": {
"type": "list_cache"
},
"注册网址": {
"type": "label",
@ -1255,7 +1216,8 @@
"rank": 5.1
},
"SECRET_KEY": {
"rank": 3
"rank": 3,
"name": "API Key"
},
"s": {
"type": "split",

View File

@ -13,6 +13,8 @@
<img src="https://image.lunatranslator.org/zh/damoxing/extraapi3.png">
</details>
>**model**可以在下拉列表中选取,如果列表中没有也可以参照接口官方文档手动填写/修改。<br>
>部分接口可以根据**API接口地址**和**API Key**动态获取模型列表,填好这两项后点击**model**旁的刷新按钮即可获取可用的模型列表。
### ChatGPT兼容接口
@ -27,7 +29,7 @@
**API接口地址** `https://api.groq.com/openai/v1/chat/completions`
**SECRET_KEY** https://console.groq.com/keys
**API Key** https://console.groq.com/keys
**model** https://console.groq.com/docs/models 填写`Model ID`
@ -35,7 +37,7 @@
**API接口地址** `https://openrouter.ai/api/v1/chat/completions`
**SECRET_KEY** https://openrouter.ai/settings/keys
**API Key** https://openrouter.ai/settings/keys
**model** https://openrouter.ai/docs/models
@ -43,10 +45,14 @@
**API接口地址** `https://api.deepbricks.ai/v1/chat/completions`
**SECRET_KEY** https://deepbricks.ai/api-key
**API Key** https://deepbricks.ai/api-key
**model** https://deepbricks.ai/pricing
### **Azure**
https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#completions
<!-- tabs:end -->
#### 国产大模型接口
@ -58,7 +64,7 @@
**API接口地址** `https://api.deepseek.com`
**SECRET_KEY** https://platform.deepseek.com/api_keys
**API Key** https://platform.deepseek.com/api_keys
**model** https://platform.deepseek.com/api-docs/zh-cn/pricing
@ -66,7 +72,7 @@
**API接口地址** `https://dashscope.aliyuncs.com/compatible-mode/v1`
**SECRET_KEY** https://bailian.console.aliyun.com/?apiKey=1#/api-key
**API Key** https://bailian.console.aliyun.com/?apiKey=1#/api-key
**model** https://help.aliyun.com/zh/model-studio/product-overview/billing-for-alibaba-cloud-model-studio/#2550bcc04d2tk
@ -74,7 +80,7 @@
**API接口地址** `https://ark.cn-beijing.volces.com/api/v3`
**SECRET_KEY** [创建API Key](https://console.volcengine.com/ark/region:ark+cn-beijing/apiKey?apikey=%7B%7D)获取
**API Key** [创建API Key](https://console.volcengine.com/ark/region:ark+cn-beijing/apiKey?apikey=%7B%7D)获取
**model** [创建推理接入点](https://console.volcengine.com/ark/region:ark+cn-beijing/endpoint?current=1&pageSize=10)后,填入**接入点**而非**模型**
@ -85,7 +91,7 @@
**API接口地址** `https://api.moonshot.cn`
**SECRET_KEY** https://platform.moonshot.cn/console/api-keys
**API Key** https://platform.moonshot.cn/console/api-keys
**model** https://platform.moonshot.cn/docs/intro
@ -93,7 +99,7 @@
**API接口地址** `https://open.bigmodel.cn/api/paas/v4/chat/completions`
**SECRET_KEY** https://bigmodel.cn/usercenter/apikeys
**API Key** https://bigmodel.cn/usercenter/apikeys
**model** https://bigmodel.cn/dev/howuse/model
@ -101,7 +107,7 @@
**API接口地址** `https://api.lingyiwanwu.com`
**SECRET_KEY** https://platform.lingyiwanwu.com/apikeys
**API Key** https://platform.lingyiwanwu.com/apikeys
**model** https://platform.lingyiwanwu.com/docs/api-reference#list-models
@ -109,7 +115,7 @@
**API接口地址** `https://api.siliconflow.cn`
**SECRET_KEY** https://cloud-hk.siliconflow.cn/account/ak
**API Key** https://cloud-hk.siliconflow.cn/account/ak
**model** https://docs.siliconflow.cn/docs/model-names
@ -117,7 +123,7 @@
**API接口地址** `https://spark-api-open.xf-yun.com/v1`
**SECRET_KEY** 参考[官方文档](https://www.xfyun.cn/doc/spark/HTTP%E8%B0%83%E7%94%A8%E6%96%87%E6%A1%A3.html#_3-%E8%AF%B7%E6%B1%82%E8%AF%B4%E6%98%8E)获取**APIKey**和**APISecret**后,按照**APIKey:APISecret**的格式填入
**API Key** 参考[官方文档](https://www.xfyun.cn/doc/spark/HTTP%E8%B0%83%E7%94%A8%E6%96%87%E6%A1%A3.html#_3-%E8%AF%B7%E6%B1%82%E8%AF%B4%E6%98%8E)获取**APIKey**和**APISecret**后,按照**APIKey:APISecret**的格式填入
**model** https://www.xfyun.cn/doc/spark/HTTP%E8%B0%83%E7%94%A8%E6%96%87%E6%A1%A3.html#_3-2-%E8%AF%B7%E6%B1%82%E5%8F%82%E6%95%B0
@ -137,7 +143,7 @@
**model** https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models
**SECRET_KEY** https://aistudio.google.com/app/apikey
**API Key** https://aistudio.google.com/app/apikey
### **claude**
@ -149,13 +155,10 @@
### **cohere**
**SECRET_KEY** https://dashboard.cohere.com/api-keys
**API Key** https://dashboard.cohere.com/api-keys
**model** https://docs.cohere.com/docs/models
### **Azure**
https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#completions
<!-- tabs:end -->

View File

@ -28,8 +28,8 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR}/version)
include(generate_product_version)
set(VERSION_MAJOR 5)
set(VERSION_MINOR 33)
set(VERSION_PATCH 11)
set(VERSION_MINOR 34)
set(VERSION_PATCH 0)
add_library(pch pch.cpp)
target_precompile_headers(pch PUBLIC pch.h)