Mirror of https://github.com/HIllya51/LunaTranslator.git (synced 2024-12-29 00:24:13 +08:00)

Commit 7afd0ea856 (parent 4a4503dcc6)
@@ -1,5 +1,5 @@
from qtsymbols import *
import os, functools
import os, functools, re
from myutils.config import globalconfig
from myutils.utils import splittranslatortypes
from gui.usefulwidget import (

@@ -37,16 +37,37 @@ def getall(l, item="fanyi", name=None):
    return grids


def validator(self, text):
    regExp = re.compile(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3}):(\d{1,5})$")
    mch = regExp.match(text)
    for _ in (1,):

        if not mch:
            break
        _1, _2, _3, _4, _p = [int(_) for _ in mch.groups()]
        if _p > 65535:
            break
        if any([_ > 255 for _ in [_1, _2, _3, _4]]):
            break
        globalconfig["proxy"] = text
        self.createproxyedit_check.setText("")
        return
    self.createproxyedit_check.setText("Invalid")


def createproxyedit(self):
    proxy = QLineEdit(globalconfig["proxy"])
    self.__proxyedit = proxy
    proxy.textEdited.connect(
        lambda: globalconfig.__setitem__("proxy", self.__proxyedit.text())
    )
    proxy.textChanged.connect(functools.partial(validator, self))
    _ifusesysproxy(self, globalconfig["usesysproxy"])
    return proxy


def createproxyedit_check(self):
    self.createproxyedit_check = QLabel()
    return self.createproxyedit_check


def _ifusesysproxy(self, x):
    self.__proxyedit.setEnabled(not x)


@@ -73,7 +94,11 @@ def makeproxytab(self, basel):
                )
            ),
        ],
        [("手动设置代理(ip:port)", 5), (functools.partial(createproxyedit, self), 5)],
        [
            ("手动设置代理(ip:port)", 5),
            (functools.partial(createproxyedit, self), 5),
            (functools.partial(createproxyedit_check, self), 5),
        ],
        [],
        [("使用代理的项目", -1)],
    ]
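The hunk above validates proxy strings of the form ip:port. A minimal standalone sketch of the same check (the helper name is made up for illustration; this is not code from the repository):

```python
# Standalone sketch of the ip:port check introduced above (helper name is made up):
# a dotted-quad address with each octet <= 255 and a port <= 65535 is accepted.
import re

PATTERN = re.compile(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3}):(\d{1,5})$")

def is_valid_proxy(text: str) -> bool:
    m = PATTERN.match(text)
    if not m:
        return False
    *octets, port = (int(g) for g in m.groups())
    return port <= 65535 and all(o <= 255 for o in octets)

assert is_valid_proxy("127.0.0.1:7890")
assert not is_valid_proxy("127.0.0.1:70000")  # port out of range
assert not is_valid_proxy("300.0.0.1:8080")   # octet out of range
```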
@@ -60,11 +60,15 @@ def splitapillm(l):
    return is_gpt_likes, not_is_gpt_like


def loadvisinternal(skipid=False, skipidid=None):
def loadvisinternal(btnplus):
    __vis = []
    __uid = []
    lixians, pre, mianfei, develop, shoufei = splittranslatortypes()
    if btnplus == "api":
        is_gpt_likes, not_is_gpt_like = splitapillm(shoufei)
    elif btnplus == "offline":
        is_gpt_likes, not_is_gpt_like = splitapillm(lixians)

    for _ in is_gpt_likes:
        _f = "./Lunatranslator/translator/{}.py".format(_)
        if not os.path.exists(_f):

@@ -74,9 +78,9 @@ def loadvisinternal(skipid=False, skipidid=None):
    return __vis, __uid


def getalistname(parent, callback, skipid=False, skipidid=None):
def getalistname(parent, btnplus, callback):
    __d = {"k": 0, "n": ""}
    __vis, __uid = loadvisinternal(skipid, skipidid)
    __vis, __uid = loadvisinternal(btnplus)

    def __wrap(callback, __d, __uid):
        if len(__uid) == 0:

@@ -111,7 +115,7 @@ def getalistname(parent, callback, skipid=False, skipidid=None):
    )


def selectllmcallback(self, countnum, btn, fanyi, name):
def selectllmcallback(self, countnum, btn, btnplus, fanyi, name):
    uid = str(uuid.uuid4())
    _f1 = "./Lunatranslator/translator/{}.py".format(fanyi)
    _f2 = "./Lunatranslator/translator/{}.py".format(uid)

@@ -122,10 +126,11 @@ def selectllmcallback(self, countnum, btn, fanyi, name):
    if not name:
        name = globalconfig["fanyi"][fanyi]["name"] + "_copy"
    globalconfig["fanyi"][uid]["name"] = name
    globalconfig["fanyi"][uid]["type"] = btnplus
    if fanyi in translatorsetting:
        translatorsetting[uid] = deepcopydict(translatorsetting[fanyi])

    layout: QGridLayout = self.damoxinggridinternal
    layout: QGridLayout = getattr(self, "damoxinggridinternal" + btnplus)

    items = autoinitdialog_items(translatorsetting[uid])
    last = getIconButton(

@@ -175,20 +180,29 @@ def selectllmcallback(self, countnum, btn, fanyi, name):
    countnum.append(0)


def btnpluscallback(self, countnum, btn):
    getalistname(self, functools.partial(selectllmcallback, self, countnum, btn))
def btnpluscallback(self, countnum, btn, btnplus):
    getalistname(
        self,
        btnplus,
        functools.partial(selectllmcallback, self, countnum, btn, btnplus),
    )


def createbtn(self, countnum):
def createbtn(self, countnum, btnplus):
    btn = QPushButton(self)
    btn.setIcon(qtawesome.icon("fa.plus"))
    btn.clicked.connect(functools.partial(btnpluscallback, self, countnum, btn))
    btn.clicked.connect(
        functools.partial(btnpluscallback, self, countnum, btn, btnplus)
    )
    return btn


def createbtnquest(self):
def createbtnquest(self, btnplus):
    btn = QPushButton(self)
    btn.setIcon(qtawesome.icon("fa.question"))
    if btnplus == "offline":
        btn.clicked.connect(lambda: os.startfile(dynamiclink("{docs_server}/#/zh/offlinellm")))
    elif btnplus == "api":
        btn.clicked.connect(
            lambda: os.startfile(dynamiclink("{docs_server}/#/zh/guochandamoxing"))
        )

@@ -266,18 +280,18 @@ def initsome11(self, l, label=None, btnplus=False):
        grids.append(
            [
                ("", 10),
                (functools.partial(createbtn, self, countnum), 2),
                (functools.partial(createbtnquest, self), 2),
                (functools.partial(createbtn, self, countnum, btnplus), 2),
                (functools.partial(createbtnquest, self, btnplus), 2),
            ]
        )

    return grids


def initsome2(self, l, label=None):
def initsome2(self, l, label=None, btnplus=None):
    is_gpt_likes, not_is_gpt_like = splitapillm(l)
    not_is_gpt_like = initsome11(self, not_is_gpt_like, label)
    is_gpt_likes = initsome11(self, is_gpt_likes, label, btnplus=True)
    is_gpt_likes = initsome11(self, is_gpt_likes, label, btnplus=btnplus)
    grids = [
        [
            (

@@ -292,7 +306,7 @@ def initsome2(self, l, label=None):
                type="grid",
                title="大模型",
                grid=is_gpt_likes,
                internallayoutname="damoxinggridinternal",
                internallayoutname="damoxinggridinternal" + btnplus,
                parent=self,
            ),
            0,

@@ -469,10 +483,10 @@ def setTabTwo_lazy(self, basel):
    ]
    lixians, pre, mianfei, develop, shoufei = splittranslatortypes()

    offlinegrid = initsome11(self, lixians)
    offlinegrid = initsome2(self, lixians, btnplus="offline")
    onlinegrid = initsome11(self, mianfei)
    developgrid += initsome11(self, develop)
    online_reg_grid += initsome2(self, shoufei)
    online_reg_grid += initsome2(self, shoufei, btnplus="api")
    pretransgrid += initsome11(self, pre)
    vw, vl = getvboxwidget()
    basel.addWidget(vw)
@@ -1563,6 +1563,7 @@
        "use": false,
        "color": "blue",
        "name": "DeepLX",
        "type": "offline",
        "useproxy": false
    },
    "dev_chatgpt": {

@@ -818,8 +818,6 @@
    },
    "TGW": {
        "args": {
            "TGW懒人包": "https://www.bilibili.com/video/BV1Te411U7me",
            "TGW懒人包1": "https://pan.baidu.com/s/1fe7iiHIAtoXW80Twsrv8Nw?pwd=pato",
            "Github仓库": "https://github.com/oobabooga/text-generation-webui",
            "API接口地址(默认为http://127.0.0.1:5000/)": "http://127.0.0.1:5000/",
            "API超时(秒)": 30,

@@ -844,14 +842,6 @@
            "frequency_penalty": 0
        },
        "argstype": {
            "TGW懒人包": {
                "type": "label",
                "islink": true
            },
            "TGW懒人包1": {
                "type": "label",
                "islink": true
            },
            "Github仓库": {
                "type": "label",
                "islink": true
docs/zh/offlinellm.md (new file, 39 lines)
@@ -0,0 +1,39 @@
## How to use an offline LLM for translation?

### Sakura LLM

> This is the most recommended option: it is the simplest to configure and gives the best results, and a lightweight model can even run on CPU only.

For deployment details, see https://github.com/SakuraLLM/SakuraLLM/wiki

### TGW

Deploy by following [text-generation-webui](https://github.com/oobabooga/text-generation-webui), or use the [one-click package](https://pan.baidu.com/s/1fe7iiHIAtoXW80Twsrv8Nw?pwd=pato) together with the [unofficial tutorial](https://www.bilibili.com/video/BV1Te411U7me).

!> If you follow the Bilibili uploader's video and something goes wrong, don't come to me; ask the uploader.

### ChatGPT-compatible API

The **Sakura LLM** and **TGW** interfaces are in fact almost identical to the **ChatGPT-compatible API**; they merely add a few preset prompts and parameters. You can therefore enter the Sakura or TGW address and model into this interface's parameters and use it that way.

You can also deploy a model with tools such as **oneapi** or **ollama**, then fill in the address and model name.

You can even deploy the model in the cloud on a platform such as Kaggle; in that case you may need a SECRET_KEY, otherwise the SECRET_KEY parameter can be ignored.
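To illustrate what a ChatGPT-compatible endpoint expects, here is a minimal sketch (not LunaTranslator code); the base URL, model name, and SECRET_KEY below are placeholders for your own deployment:

```python
# Minimal sketch of a request to a ChatGPT-compatible server (values are placeholders).
import json
import urllib.request

API_BASE = "http://127.0.0.1:5000"  # your Sakura/TGW/ollama-style server
MODEL = "your-model-name"           # the model you are actually serving
SECRET_KEY = ""                     # usually empty for a purely local server

payload = {
    "model": MODEL,
    "messages": [{"role": "user", "content": "こんにちは"}],
    "temperature": 0.3,
}
req = urllib.request.Request(
    API_BASE + "/v1/chat/completions",
    data=json.dumps(payload).encode("utf-8"),
    headers={
        "Content-Type": "application/json",
        "Authorization": "Bearer " + SECRET_KEY,
    },
)
with urllib.request.urlopen(req) as resp:
    reply = json.loads(resp.read())
print(reply["choices"][0]["message"]["content"])
```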
#### Example: deployment with ollama [@asukaminato0721](https://github.com/HIllya51/LunaTranslator/issues/797)

The model needs to run locally, or ~~forward the port to localhost over ssh (if you understand that sentence, you don't need to read on)~~.

Only one approach is shown here; anything else compatible with the gpt api works too.

Download ollama: https://www.ollama.com/

Take llama3 as an example:

https://www.ollama.com/library/llama3

Once the model is downloaded and running in the background, in

![img](https://image.lunatranslator.xyz/zh/336483101-915f17c5-27a4-465f-9b4e-7a547ba5029f.png)

change the model to the one you are actually running and the port to the matching one. That's all.
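If you are unsure which model name to enter, one way to check is to ask the running ollama instance what it has available; a small sketch, assuming ollama's default port 11434 and its /api/tags listing endpoint:

```python
# Sketch: list the models a local ollama instance can serve (default port assumed).
import json
import urllib.request

with urllib.request.urlopen("http://127.0.0.1:11434/api/tags") as resp:
    data = json.loads(resp.read())

for model in data.get("models", []):
    print(model["name"])  # e.g. "llama3:latest"
```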
@@ -1,24 +0,0 @@
## How to use offline chatgpt

The model needs to run locally, or ~~forward the port to localhost over ssh (if you understand that sentence, you don't need to read on)~~.

Only one approach is shown here; anything else compatible with the gpt api works too.

Download ollama: https://www.ollama.com/

Take llama3 as an example:

https://www.ollama.com/library/llama3

Once the model is downloaded and running in the background, in

![img](https://image.lunatranslator.xyz/zh/336483101-915f17c5-27a4-465f-9b4e-7a547ba5029f.png)

change the model to the one you are actually running and the port to the matching one. That's all.

> PS: It is recommended to follow https://github.com/SakuraLLM/SakuraLLM/wiki
> and deploy with llama.cpp, which is the simplest way, then use the dedicated Sakura LLM interface, which ships with some prompts written by the model's authors.

<hr>

Reference: [asukaminato0721](https://github.com/HIllya51/LunaTranslator/issues/797)
@@ -3,6 +3,7 @@
- [Getting and launching the software](/zh/start.md)
- [Software updates](/zh/update.md)
- [FAQ](/zh/qas.md)
- [How to use an offline LLM for translation](/zh/qa3.md)
- [How to use embedded translation](/zh/embedtranslate.md)
- [How to improve the OCR-mode experience](/zh/gooduseocr.md)
- [How to use Chinese LLM API interfaces](/zh/guochandamoxing.md)
@@ -11,6 +12,5 @@
- [How to install extra language support for Windows OCR](/zh/windowsocr.md)
- [How to use Mecab word segmentation & part-of-speech coloring](/zh/qa1.md)
- [How to automatically send selected words to anki](/zh/qa2.md)
- [How to use offline chatgpt](/zh/qa3.md)
- [How to use the free gemini api](/zh/qa4.md)
- [Support the author](/zh/support.md)
@@ -29,7 +29,7 @@ include(generate_product_version)

set(VERSION_MAJOR 5)
set(VERSION_MINOR 15)
set(VERSION_PATCH 0)
set(VERSION_PATCH 1)

add_library(pch pch.cpp)
target_precompile_headers(pch PUBLIC pch.h)