mirror of
https://github.com/HIllya51/LunaTranslator.git
synced 2024-12-29 16:44:13 +08:00
gpt
This commit is contained in:
parent
278340a019
commit
d7d09467c1
118
LunaTranslator/LunaTranslator/ocrengines/chatgptlike.py
Normal file
118
LunaTranslator/LunaTranslator/ocrengines/chatgptlike.py
Normal file
@ -0,0 +1,118 @@
|
||||
from ocrengines.baseocrclass import baseocr
|
||||
import base64
|
||||
|
||||
|
||||
class OCR(baseocr):
    """OCR engine backed by an OpenAI-compatible (ChatGPT-like) vision chat API.

    The image is base64-encoded into a data URL and sent as an
    ``image_url`` content part of a chat-completions request; the model's
    reply text is returned as the recognized text.
    """

    def langmap(self):
        """Map internal language codes to the English names used in prompts."""
        return {
            "zh": "Simplified Chinese",
            "ja": "Japanese",
            "en": "English",
            "ru": "Russian",
            "es": "Spanish",
            "ko": "Korean",
            "fr": "French",
            "cht": "Traditional Chinese",
            "vi": "Vietnamese",
            "tr": "Turkish",
            "pl": "Polish",
            "uk": "Ukrainian",
            "it": "Italian",
            "ar": "Arabic",
            "th": "Thai",
        }

    def createdata(self, message):
        """Build the JSON request body for the chat-completions endpoint.

        message: the ``messages`` list to send to the API.
        Returns a dict ready to be serialized as the POST body.
        """
        try:
            temperature = float(self.config["Temperature"])
        except (KeyError, TypeError, ValueError):
            # Missing or non-numeric setting: fall back to a sane default.
            temperature = 0.3

        data = dict(
            model=self.config["model"],
            messages=message,
            # optional
            max_tokens=self.config["max_tokens"],
            n=1,
            # stop=None,
            top_p=self.config["top_p"],
            temperature=temperature,
            frequency_penalty=self.config["frequency_penalty"],
        )
        return data

    def createparam(self):
        """Query-string parameters for the request (none for this engine)."""
        return None

    def createheaders(self):
        """HTTP headers carrying the bearer token for authorization."""
        return {"Authorization": "Bearer " + self.config["SECRET_KEY"]}

    def checkv1(self, api_url: str):
        """Normalize an API base URL so it ends with its version segment.

        Doubao's API insists on ``/v3`` and Zhipu AI on ``/v4`` rather than
        the usual ``/v1``; those suffixes are preserved as-is. A trailing
        slash after a version segment is stripped, and a URL without any
        version segment gets ``/v1`` appended.
        """
        # Recognized version suffixes: /v1 (standard), /v3 (Doubao), /v4 (Zhipu AI).
        for version in ("/v1", "/v3", "/v4"):
            if api_url.endswith(version):
                return api_url
            if api_url.endswith(version + "/"):
                # Drop the trailing slash so path joining stays consistent.
                return api_url[:-1]
        if api_url.endswith("/"):
            return api_url + "v1"
        return api_url + "/v1"

    def ocr(self, imagebinary):
        """Send the image to the vision model and return the recognized text.

        imagebinary: raw image bytes; they are base64-encoded into a
        ``data:image/jpeg`` URL for the API.
        Raises Exception carrying the raw response text when the reply
        cannot be parsed as a chat-completions result.
        """
        if self.config["use_custom_prompt"]:
            prompt = self.config["custom_prompt"]
        else:
            prompt = f"Recognize the {self.srclang} text in the picture."

        base64_image = base64.b64encode(imagebinary).decode("utf-8")
        message = [
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": prompt},
                    {
                        "type": "image_url",
                        "image_url": {
                            "url": f"data:image/jpeg;base64,{base64_image}",
                            "detail": "low",
                        },
                    },
                ],
            }
        ]

        response = self.proxysession.post(
            self.createurl(),
            headers=self.createheaders(),
            params=self.createparam(),
            json=self.createdata(message),
        )
        try:
            content = (
                response.json()["choices"][0]["message"]["content"]
                .replace("\n\n", "\n")
                .strip()
            )
            return content
        except Exception:
            # Surface the raw response body so the user can see the API error.
            raise Exception(response.text)

    def createurl(self):
        """Full chat-completions endpoint derived from the configured base URL."""
        url = self.config["apiurl"]
        if not url.endswith("/chat/completions"):
            url = self.checkv1(url) + "/chat/completions"
        return url
|
@ -4,6 +4,25 @@ from ocrengines.baseocrclass import baseocr
|
||||
|
||||
|
||||
class OCR(baseocr):
|
||||
def langmap(self):
    """Return the mapping from internal language codes to English names."""
    # Kept as explicit (code, name) pairs for easy extension/review.
    pairs = [
        ("zh", "Simplified Chinese"),
        ("ja", "Japanese"),
        ("en", "English"),
        ("ru", "Russian"),
        ("es", "Spanish"),
        ("ko", "Korean"),
        ("fr", "French"),
        ("cht", "Traditional Chinese"),
        ("vi", "Vietnamese"),
        ("tr", "Turkish"),
        ("pl", "Polish"),
        ("uk", "Ukrainian"),
        ("it", "Italian"),
        ("ar", "Arabic"),
        ("th", "Thai"),
    ]
    return dict(pairs)
|
||||
|
||||
def ocr(self, imagebinary):
|
||||
self.checkempty(["key"])
|
||||
self.checkempty(["url"])
|
||||
@ -13,12 +32,15 @@ class OCR(baseocr):
|
||||
model = self.config["model"]
|
||||
image_data = base64.b64encode(imagebinary).decode("utf-8")
|
||||
|
||||
# Prepare the request payload
|
||||
if self.config["use_custom_prompt"]:
|
||||
prompt = self.config["custom_prompt"]
|
||||
else:
|
||||
prompt = f"Recognize the {self.srclang} text in the picture."
|
||||
payload = {
|
||||
"contents": [
|
||||
{
|
||||
"parts": [
|
||||
{"text": "Ocr this picture"},
|
||||
{"text": prompt},
|
||||
{"inlineData": {"mimeType": "image/png", "data": image_data}},
|
||||
]
|
||||
}
|
||||
|
@ -1471,6 +1471,10 @@
|
||||
"geminiocr": {
|
||||
"use": false,
|
||||
"name": "GeminiOCR"
|
||||
},
|
||||
"chatgptlike": {
|
||||
"use": false,
|
||||
"name": "ChatGPT_兼容接口_OCR"
|
||||
}
|
||||
},
|
||||
"fanyi": {
|
||||
|
@ -272,7 +272,83 @@
|
||||
"args": {
|
||||
"key": "",
|
||||
"url": "https://generativelanguage.googleapis.com/v1",
|
||||
"model": "gemini-1.5-flash"
|
||||
"model": "gemini-1.5-flash",
|
||||
"use_custom_prompt": false,
|
||||
"custom_prompt": ""
|
||||
},
|
||||
"argstype": {
|
||||
"custom_prompt": {
|
||||
"name": "自定义promt",
|
||||
"type": "multiline",
|
||||
"refswitch": "use_custom_prompt"
|
||||
},
|
||||
"use_custom_prompt": {
|
||||
"type": "switch_ref",
|
||||
"name": "使用自定义promt"
|
||||
}
|
||||
}
|
||||
},
|
||||
"chatgptlike": {
|
||||
"args": {
|
||||
"model": "gpt-4",
|
||||
"apiurl": "https://api.openai.com/v1",
|
||||
"SECRET_KEY": "",
|
||||
"use_custom_prompt": false,
|
||||
"custom_prompt": "",
|
||||
"Temperature": 0.3,
|
||||
"top_p": 0.3,
|
||||
"max_tokens": 128,
|
||||
"frequency_penalty": 0,
|
||||
"s": ""
|
||||
},
|
||||
"argstype": {
|
||||
"SECRET_KEY": {
|
||||
"rank": 1.1
|
||||
},
|
||||
"s": {
|
||||
"type": "split",
|
||||
"rank": 1.5
|
||||
},
|
||||
"apiurl": {
|
||||
"name": "API接口地址",
|
||||
"rank": 0
|
||||
},
|
||||
"model": {
|
||||
"rank": 1
|
||||
},
|
||||
"top_p": {
|
||||
"type": "spin",
|
||||
"min": 0,
|
||||
"max": 1,
|
||||
"step": 0.01
|
||||
},
|
||||
"frequency_penalty": {
|
||||
"type": "spin",
|
||||
"min": 0,
|
||||
"max": 2,
|
||||
"step": 0.05
|
||||
},
|
||||
"max_tokens": {
|
||||
"type": "intspin",
|
||||
"min": 1,
|
||||
"max": 4096,
|
||||
"step": 1
|
||||
},
|
||||
"custom_prompt": {
|
||||
"name": "自定义promt",
|
||||
"type": "multiline",
|
||||
"refswitch": "use_custom_prompt"
|
||||
},
|
||||
"use_custom_prompt": {
|
||||
"type": "switch_ref",
|
||||
"name": "使用自定义promt"
|
||||
},
|
||||
"Temperature": {
|
||||
"type": "spin",
|
||||
"min": 0,
|
||||
"max": 1,
|
||||
"step": 0.1
|
||||
}
|
||||
}
|
||||
},
|
||||
"xunfei": {
|
||||
|
@ -363,7 +363,7 @@
|
||||
"rank": 1
|
||||
},
|
||||
"API_KEY": {
|
||||
"rank": 2
|
||||
"rank": 0.5
|
||||
},
|
||||
"流式输出": {
|
||||
"type": "switch",
|
||||
|
@ -29,7 +29,7 @@ include(generate_product_version)
|
||||
|
||||
set(VERSION_MAJOR 5)
|
||||
set(VERSION_MINOR 15)
|
||||
set(VERSION_PATCH 10)
|
||||
set(VERSION_PATCH 11)
|
||||
|
||||
add_library(pch pch.cpp)
|
||||
target_precompile_headers(pch PUBLIC pch.h)
|
||||
|
Loading…
x
Reference in New Issue
Block a user