恍兮惚兮 2024-07-08 22:25:59 +08:00
parent 220b8be23c
commit 27b6f0e1ba
16 changed files with 424 additions and 457 deletions

View File

@@ -14,9 +14,6 @@ if __name__ == "__main__":
sys.path.append("./")
sys.path.append("./userconfig")
sys.path.insert(
0, "./LunaTranslator/network/" + ["winhttp", "libcurl"][globalconfig["network"]]
)
import gobject
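With the backend-specific sys.path.insert removed above, the winhttp / libcurl choice is made at call time from globalconfig instead (see the loadrequester hunk later in this commit). A minimal sketch of that dispatch, assuming the module paths shown in this diff:

from myutils.config import globalconfig

def loadrequester():
    # choose the HTTP backend at runtime instead of patching sys.path at startup
    if globalconfig["network"] == 1:
        from network.libcurl.requester import Requester
    elif globalconfig["network"] == 0:
        from network.winhttp.requester import Requester
    return Requester()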

View File

@@ -178,12 +178,7 @@ def setTab_proxy_lazy(self, basel):
"网络请求",
(
D_getsimplecombobox(
["winhttp", "libcurl"],
globalconfig,
"network",
callback=functools.partial(
gobject.baseobject.showneedrestart, "网络请求"
),
["winhttp", "libcurl"], globalconfig, "network"
),
5,
),

View File

@@ -4,7 +4,7 @@ from threading import Thread
from myutils.commonbase import proxysession
from myutils.config import globalconfig, savehook_new_data
from traceback import print_exc
from network.requests_common import NetWorkException
from requests import NetWorkException
class common:

View File

@@ -8,7 +8,7 @@ from myutils.config import (
from myutils.utils import initanewitem, gamdidchangedtask
import functools
import time
from network.requests_common import NetWorkException
from requests import NetWorkException
from qtsymbols import *
from gui.usefulwidget import getlineedit
from gui.dialog_savedgame import getreflist, getalistname

View File

@@ -1,5 +1,5 @@
import gobject, os
from network.requests_common import NetWorkException
from requests import NetWorkException
from ctypes import (
CDLL,
c_void_p,
@@ -42,94 +42,94 @@ class curl_ws_frame(Structure):
class CURLcode(c_int):
CURLE_OK = 0
CURLE_UNSUPPORTED_PROTOCOL = 1
CURLE_FAILED_INIT = 2
CURLE_URL_MALFORMAT = 3
CURLE_COULDNT_RESOLVE_PROXY = 5
CURLE_COULDNT_RESOLVE_HOST = 6
CURLE_COULDNT_CONNECT = 7
CURLE_FTP_WEIRD_SERVER_REPLY = 8
CURLE_REMOTE_ACCESS_DENIED = 9
CURLE_FTP_WEIRD_PASS_REPLY = 11
CURLE_FTP_WEIRD_PASV_REPLY = 13
CURLE_FTP_WEIRD_227_FORMAT = 14
CURLE_FTP_CANT_GET_HOST = 15
CURLE_FTP_COULDNT_SET_TYPE = 17
CURLE_PARTIAL_FILE = 18
CURLE_FTP_COULDNT_RETR_FILE = 19
CURLE_QUOTE_ERROR = 21
CURLE_HTTP_RETURNED_ERROR = 22
CURLE_WRITE_ERROR = 23
CURLE_UPLOAD_FAILED = 25
CURLE_READ_ERROR = 26
CURLE_OUT_OF_MEMORY = 27
CURLE_OPERATION_TIMEDOUT = 28
CURLE_FTP_PORT_FAILED = 30
CURLE_FTP_COULDNT_USE_REST = 31
CURLE_RANGE_ERROR = 33
CURLE_HTTP_POST_ERROR = 34
CURLE_SSL_CONNECT_ERROR = 35
CURLE_BAD_DOWNLOAD_RESUME = 36
CURLE_FILE_COULDNT_READ_FILE = 37
CURLE_LDAP_CANNOT_BIND = 38
CURLE_LDAP_SEARCH_FAILED = 39
CURLE_FUNCTION_NOT_FOUND = 41
CURLE_ABORTED_BY_CALLBACK = 42
CURLE_BAD_FUNCTION_ARGUMENT = 43
CURLE_INTERFACE_FAILED = 45
CURLE_TOO_MANY_REDIRECTS = 47
CURLE_UNKNOWN_TELNET_OPTION = 48
CURLE_TELNET_OPTION_SYNTAX = 49
CURLE_PEER_FAILED_VERIFICATION = 51
CURLE_GOT_NOTHING = 52
CURLE_SSL_ENGINE_NOTFOUND = 53
CURLE_SSL_ENGINE_SETFAILED = 54
CURLE_SEND_ERROR = 55
CURLE_RECV_ERROR = 56
CURLE_SSL_CERTPROBLEM = 58
CURLE_SSL_CIPHER = 59
CURLE_SSL_CACERT = 60
CURLE_BAD_CONTENT_ENCODING = 61
CURLE_LDAP_INVALID_URL = 62
CURLE_FILESIZE_EXCEEDED = 63
CURLE_USE_SSL_FAILED = 64
CURLE_SEND_FAIL_REWIND = 65
CURLE_SSL_ENGINE_INITFAILED = 66
CURLE_LOGIN_DENIED = 67
CURLE_TFTP_NOTFOUND = 68
CURLE_TFTP_PERM = 69
CURLE_REMOTE_DISK_FULL = 70
CURLE_TFTP_ILLEGAL = 71
CURLE_TFTP_UNKNOWNID = 72
CURLE_REMOTE_FILE_EXISTS = 73
CURLE_TFTP_NOSUCHUSER = 74
CURLE_CONV_FAILED = 75
CURLE_CONV_REQD = 76
CURLE_SSL_CACERT_BADFILE = 77
CURLE_REMOTE_FILE_NOT_FOUND = 78
CURLE_SSH = 79
CURLE_SSL_SHUTDOWN_FAILED = 80
CURLE_AGAIN = 81
CURLE_SSL_CRL_BADFILE = 82
CURLE_SSL_ISSUER_ERROR = 83
CURLE_FTP_PRET_FAILED = 84
CURLE_RTSP_CSEQ_ERROR = 85
CURLE_RTSP_SESSION_ERROR = 86
CURLE_FTP_BAD_FILE_LIST = 87
CURLE_CHUNK_FAILED = 88
CURLE_NO_CONNECTION_AVAILABLE = 89
CURLE_SSL_PINNEDPUBKEYNOTMATCH = 90
CURLE_SSL_INVALIDCERTSTATUS = 91
CURLE_HTTP2_STREAM = 92
CURLE_RECURSIVE_API_CALL = 93
CURLE_AUTH_ERROR = 94
CURLE_HTTP3 = 95
CURLE_QUIC_CONNECT_ERROR = 96
CURLE_PROXY = 97
CURLE_SSL_CLIENTCERT = 98
CURLE_UNRECOVERABLE_POLL = 99
CURL_LAST = 100
OK = 0
UNSUPPORTED_PROTOCOL = 1
FAILED_INIT = 2
URL_MALFORMAT = 3
COULDNT_RESOLVE_PROXY = 5
COULDNT_RESOLVE_HOST = 6
COULDNT_CONNECT = 7
FTP_WEIRD_SERVER_REPLY = 8
REMOTE_ACCESS_DENIED = 9
FTP_WEIRD_PASS_REPLY = 11
FTP_WEIRD_PASV_REPLY = 13
FTP_WEIRD_227_FORMAT = 14
FTP_CANT_GET_HOST = 15
FTP_COULDNT_SET_TYPE = 17
PARTIAL_FILE = 18
FTP_COULDNT_RETR_FILE = 19
QUOTE_ERROR = 21
HTTP_RETURNED_ERROR = 22
WRITE_ERROR = 23
UPLOAD_FAILED = 25
READ_ERROR = 26
OUT_OF_MEMORY = 27
OPERATION_TIMEDOUT = 28
FTP_PORT_FAILED = 30
FTP_COULDNT_USE_REST = 31
RANGE_ERROR = 33
HTTP_POST_ERROR = 34
SSL_CONNECT_ERROR = 35
BAD_DOWNLOAD_RESUME = 36
FILE_COULDNT_READ_FILE = 37
LDAP_CANNOT_BIND = 38
LDAP_SEARCH_FAILED = 39
FUNCTION_NOT_FOUND = 41
ABORTED_BY_CALLBACK = 42
BAD_FUNCTION_ARGUMENT = 43
INTERFACE_FAILED = 45
TOO_MANY_REDIRECTS = 47
UNKNOWN_TELNET_OPTION = 48
TELNET_OPTION_SYNTAX = 49
PEER_FAILED_VERIFICATION = 51
GOT_NOTHING = 52
SSL_ENGINE_NOTFOUND = 53
SSL_ENGINE_SETFAILED = 54
SEND_ERROR = 55
RECV_ERROR = 56
SSL_CERTPROBLEM = 58
SSL_CIPHER = 59
SSL_CACERT = 60
BAD_CONTENT_ENCODING = 61
LDAP_INVALID_URL = 62
FILESIZE_EXCEEDED = 63
USE_SSL_FAILED = 64
SEND_FAIL_REWIND = 65
SSL_ENGINE_INITFAILED = 66
LOGIN_DENIED = 67
TFTP_NOTFOUND = 68
TFTP_PERM = 69
REMOTE_DISK_FULL = 70
TFTP_ILLEGAL = 71
TFTP_UNKNOWNID = 72
REMOTE_FILE_EXISTS = 73
TFTP_NOSUCHUSER = 74
CONV_FAILED = 75
CONV_REQD = 76
SSL_CACERT_BADFILE = 77
REMOTE_FILE_NOT_FOUND = 78
SSH = 79
SSL_SHUTDOWN_FAILED = 80
AGAIN = 81
SSL_CRL_BADFILE = 82
SSL_ISSUER_ERROR = 83
FTP_PRET_FAILED = 84
RTSP_CSEQ_ERROR = 85
RTSP_SESSION_ERROR = 86
FTP_BAD_FILE_LIST = 87
CHUNK_FAILED = 88
NO_CONNECTION_AVAILABLE = 89
SSL_PINNEDPUBKEYNOTMATCH = 90
SSL_INVALIDCERTSTATUS = 91
HTTP2_STREAM = 92
RECURSIVE_API_CALL = 93
AUTH_ERROR = 94
HTTP3 = 95
QUIC_CONNECT_ERROR = 96
PROXY = 97
SSL_CLIENTCERT = 98
UNRECOVERABLE_POLL = 99
LAST = 100
class CURLoption(c_int):
@@ -141,118 +141,118 @@ class CURLoption(c_int):
CURLOPTTYPE_STRINGPOINT = CURLOPTTYPE_OBJECTPOINT
CURLOPTTYPE_CBPOINT = CURLOPTTYPE_OBJECTPOINT
CURLOPT_WRITEDATA = CURLOPTTYPE_CBPOINT + 1
CURLOPT_URL = CURLOPTTYPE_STRINGPOINT + 2
CURLOPT_PORT = CURLOPTTYPE_LONG + 3
CURLOPT_PROXY = CURLOPTTYPE_STRINGPOINT + 4
CURLOPT_WRITEFUNCTION = CURLOPTTYPE_FUNCTIONPOINT + 11
CURLOPT_POSTFIELDS = CURLOPTTYPE_OBJECTPOINT + 15
CURLOPT_USERAGENT = CURLOPTTYPE_STRINGPOINT + 18
CURLOPT_COOKIE = CURLOPTTYPE_STRINGPOINT + 22
CURLOPT_HTTPHEADER = CURLOPTTYPE_SLISTPOINT + 23
CURLOPT_HEADERDATA = CURLOPTTYPE_CBPOINT + 29
CURLOPT_COOKIEFILE = CURLOPTTYPE_STRINGPOINT + 31
CURLOPT_CUSTOMREQUEST = CURLOPTTYPE_STRINGPOINT + 36
CURLOPT_POST = CURLOPTTYPE_LONG + 47
CURLOPT_FOLLOWLOCATION = CURLOPTTYPE_LONG + 52
CURLOPT_POSTFIELDSIZE = CURLOPTTYPE_LONG + 60
CURLOPT_SSL_VERIFYPEER = CURLOPTTYPE_LONG + 64
CURLOPT_MAXREDIRS = CURLOPTTYPE_LONG + 68
CURLOPT_HEADERFUNCTION = CURLOPTTYPE_FUNCTIONPOINT + 79
CURLOPT_HTTPGET = CURLOPTTYPE_LONG + 80
CURLOPT_SSL_VERIFYHOST = CURLOPTTYPE_LONG + 81
CURLOPT_COOKIEJAR = CURLOPTTYPE_STRINGPOINT + 82
CURLOPT_COOKIESESSION = CURLOPTTYPE_LONG + 96
CURLOPT_SHARE = CURLOPTTYPE_OBJECTPOINT + 100
CURLOPT_ACCEPT_ENCODING = CURLOPTTYPE_STRINGPOINT + 102
CURLOPT_CONNECT_ONLY = CURLOPTTYPE_LONG + 141
CURLOPT_TIMEOUT_MS = CURLOPTTYPE_LONG + 155
CURLOPT_CONNECTTIMEOUT_MS = CURLOPTTYPE_LONG + 156
WRITEDATA = CURLOPTTYPE_CBPOINT + 1
URL = CURLOPTTYPE_STRINGPOINT + 2
PORT = CURLOPTTYPE_LONG + 3
PROXY = CURLOPTTYPE_STRINGPOINT + 4
WRITEFUNCTION = CURLOPTTYPE_FUNCTIONPOINT + 11
POSTFIELDS = CURLOPTTYPE_OBJECTPOINT + 15
USERAGENT = CURLOPTTYPE_STRINGPOINT + 18
COOKIE = CURLOPTTYPE_STRINGPOINT + 22
HTTPHEADER = CURLOPTTYPE_SLISTPOINT + 23
HEADERDATA = CURLOPTTYPE_CBPOINT + 29
COOKIEFILE = CURLOPTTYPE_STRINGPOINT + 31
CUSTOMREQUEST = CURLOPTTYPE_STRINGPOINT + 36
POST = CURLOPTTYPE_LONG + 47
FOLLOWLOCATION = CURLOPTTYPE_LONG + 52
POSTFIELDSIZE = CURLOPTTYPE_LONG + 60
SSL_VERIFYPEER = CURLOPTTYPE_LONG + 64
MAXREDIRS = CURLOPTTYPE_LONG + 68
HEADERFUNCTION = CURLOPTTYPE_FUNCTIONPOINT + 79
HTTPGET = CURLOPTTYPE_LONG + 80
SSL_VERIFYHOST = CURLOPTTYPE_LONG + 81
COOKIEJAR = CURLOPTTYPE_STRINGPOINT + 82
COOKIESESSION = CURLOPTTYPE_LONG + 96
SHARE = CURLOPTTYPE_OBJECTPOINT + 100
ACCEPT_ENCODING = CURLOPTTYPE_STRINGPOINT + 102
CONNECT_ONLY = CURLOPTTYPE_LONG + 141
TIMEOUT_MS = CURLOPTTYPE_LONG + 155
CONNECTTIMEOUT_MS = CURLOPTTYPE_LONG + 156
class CURLINFO(c_int):
CURLINFO_STRING = 0x100000
CURLINFO_LONG = 0x200000
CURLINFO_DOUBLE = 0x300000
CURLINFO_SLIST = 0x400000
CURLINFO_PTR = 0x400000
CURLINFO_SOCKET = 0x500000
CURLINFO_OFF_T = 0x600000
CURLINFO_MASK = 0x0FFFFF
CURLINFO_TYPEMASK = 0xF00000
CURLINFO_NONE = 0
CURLINFO_EFFECTIVE_URL = CURLINFO_STRING + 1
CURLINFO_RESPONSE_CODE = CURLINFO_LONG + 2
CURLINFO_TOTAL_TIME = CURLINFO_DOUBLE + 3
CURLINFO_NAMELOOKUP_TIME = CURLINFO_DOUBLE + 4
CURLINFO_CONNECT_TIME = CURLINFO_DOUBLE + 5
CURLINFO_PRETRANSFER_TIME = CURLINFO_DOUBLE + 6
CURLINFO_SIZE_UPLOAD = CURLINFO_DOUBLE + 7
CURLINFO_SIZE_UPLOAD_T = CURLINFO_OFF_T + 7
CURLINFO_SIZE_DOWNLOAD = CURLINFO_DOUBLE + 8
CURLINFO_SIZE_DOWNLOAD_T = CURLINFO_OFF_T + 8
CURLINFO_SPEED_DOWNLOAD = CURLINFO_DOUBLE + 9
CURLINFO_SPEED_DOWNLOAD_T = CURLINFO_OFF_T + 9
CURLINFO_SPEED_UPLOAD = CURLINFO_DOUBLE + 10
CURLINFO_SPEED_UPLOAD_T = CURLINFO_OFF_T + 10
CURLINFO_HEADER_SIZE = CURLINFO_LONG + 11
CURLINFO_REQUEST_SIZE = CURLINFO_LONG + 12
CURLINFO_SSL_VERIFYRESULT = CURLINFO_LONG + 13
CURLINFO_FILETIME = CURLINFO_LONG + 14
CURLINFO_FILETIME_T = CURLINFO_OFF_T + 14
CURLINFO_CONTENT_LENGTH_DOWNLOAD = CURLINFO_DOUBLE + 15
CURLINFO_CONTENT_LENGTH_DOWNLOAD_T = CURLINFO_OFF_T + 15
CURLINFO_CONTENT_LENGTH_UPLOAD = CURLINFO_DOUBLE + 16
CURLINFO_CONTENT_LENGTH_UPLOAD_T = CURLINFO_OFF_T + 16
CURLINFO_STARTTRANSFER_TIME = CURLINFO_DOUBLE + 17
CURLINFO_CONTENT_TYPE = CURLINFO_STRING + 18
CURLINFO_REDIRECT_TIME = CURLINFO_DOUBLE + 19
CURLINFO_REDIRECT_COUNT = CURLINFO_LONG + 20
CURLINFO_PRIVATE = CURLINFO_STRING + 21
CURLINFO_HTTP_CONNECTCODE = CURLINFO_LONG + 22
CURLINFO_HTTPAUTH_AVAIL = CURLINFO_LONG + 23
CURLINFO_PROXYAUTH_AVAIL = CURLINFO_LONG + 24
CURLINFO_OS_ERRNO = CURLINFO_LONG + 25
CURLINFO_NUM_CONNECTS = CURLINFO_LONG + 26
CURLINFO_SSL_ENGINES = CURLINFO_SLIST + 27
CURLINFO_COOKIELIST = CURLINFO_SLIST + 28
CURLINFO_LASTSOCKET = CURLINFO_LONG + 29
CURLINFO_FTP_ENTRY_PATH = CURLINFO_STRING + 30
CURLINFO_REDIRECT_URL = CURLINFO_STRING + 31
CURLINFO_PRIMARY_IP = CURLINFO_STRING + 32
CURLINFO_APPCONNECT_TIME = CURLINFO_DOUBLE + 33
CURLINFO_CERTINFO = CURLINFO_PTR + 34
CURLINFO_CONDITION_UNMET = CURLINFO_LONG + 35
CURLINFO_RTSP_SESSION_ID = CURLINFO_STRING + 36
CURLINFO_RTSP_CLIENT_CSEQ = CURLINFO_LONG + 37
CURLINFO_RTSP_SERVER_CSEQ = CURLINFO_LONG + 38
CURLINFO_RTSP_CSEQ_RECV = CURLINFO_LONG + 39
CURLINFO_PRIMARY_PORT = CURLINFO_LONG + 40
CURLINFO_LOCAL_IP = CURLINFO_STRING + 41
CURLINFO_LOCAL_PORT = CURLINFO_LONG + 42
CURLINFO_TLS_SESSION = CURLINFO_PTR + 43
CURLINFO_ACTIVESOCKET = CURLINFO_SOCKET + 44
CURLINFO_TLS_SSL_PTR = CURLINFO_PTR + 45
CURLINFO_HTTP_VERSION = CURLINFO_LONG + 46
CURLINFO_PROXY_SSL_VERIFYRESULT = CURLINFO_LONG + 47
CURLINFO_PROTOCOL = CURLINFO_LONG + 48
CURLINFO_SCHEME = CURLINFO_STRING + 49
CURLINFO_TOTAL_TIME_T = CURLINFO_OFF_T + 50
CURLINFO_NAMELOOKUP_TIME_T = CURLINFO_OFF_T + 51
CURLINFO_CONNECT_TIME_T = CURLINFO_OFF_T + 52
CURLINFO_PRETRANSFER_TIME_T = CURLINFO_OFF_T + 53
CURLINFO_STARTTRANSFER_TIME_T = CURLINFO_OFF_T + 54
CURLINFO_REDIRECT_TIME_T = CURLINFO_OFF_T + 55
CURLINFO_APPCONNECT_TIME_T = CURLINFO_OFF_T + 56
CURLINFO_RETRY_AFTER = CURLINFO_OFF_T + 57
CURLINFO_EFFECTIVE_METHOD = CURLINFO_STRING + 58
CURLINFO_PROXY_ERROR = CURLINFO_LONG + 59
CURLINFO_REFERER = CURLINFO_STRING + 60
CURLINFO_CAINFO = CURLINFO_STRING + 61
CURLINFO_CAPATH = CURLINFO_STRING + 62
CURLINFO_XFER_ID = CURLINFO_OFF_T + 63
CURLINFO_CONN_ID = CURLINFO_OFF_T + 64
CURLINFO_LASTONE = 64
STRING = 0x100000
LONG = 0x200000
DOUBLE = 0x300000
SLIST = 0x400000
PTR = 0x400000
SOCKET = 0x500000
OFF_T = 0x600000
MASK = 0x0FFFFF
TYPEMASK = 0xF00000
NONE = 0
EFFECTIVE_URL = STRING + 1
RESPONSE_CODE = LONG + 2
TOTAL_TIME = DOUBLE + 3
NAMELOOKUP_TIME = DOUBLE + 4
CONNECT_TIME = DOUBLE + 5
PRETRANSFER_TIME = DOUBLE + 6
SIZE_UPLOAD = DOUBLE + 7
SIZE_UPLOAD_T = OFF_T + 7
SIZE_DOWNLOAD = DOUBLE + 8
SIZE_DOWNLOAD_T = OFF_T + 8
SPEED_DOWNLOAD = DOUBLE + 9
SPEED_DOWNLOAD_T = OFF_T + 9
SPEED_UPLOAD = DOUBLE + 10
SPEED_UPLOAD_T = OFF_T + 10
HEADER_SIZE = LONG + 11
REQUEST_SIZE = LONG + 12
SSL_VERIFYRESULT = LONG + 13
FILETIME = LONG + 14
FILETIME_T = OFF_T + 14
CONTENT_LENGTH_DOWNLOAD = DOUBLE + 15
CONTENT_LENGTH_DOWNLOAD_T = OFF_T + 15
CONTENT_LENGTH_UPLOAD = DOUBLE + 16
CONTENT_LENGTH_UPLOAD_T = OFF_T + 16
STARTTRANSFER_TIME = DOUBLE + 17
CONTENT_TYPE = STRING + 18
REDIRECT_TIME = DOUBLE + 19
REDIRECT_COUNT = LONG + 20
PRIVATE = STRING + 21
HTTP_CONNECTCODE = LONG + 22
HTTPAUTH_AVAIL = LONG + 23
PROXYAUTH_AVAIL = LONG + 24
OS_ERRNO = LONG + 25
NUM_CONNECTS = LONG + 26
SSL_ENGINES = SLIST + 27
COOKIELIST = SLIST + 28
LASTSOCKET = LONG + 29
FTP_ENTRY_PATH = STRING + 30
REDIRECT_URL = STRING + 31
PRIMARY_IP = STRING + 32
APPCONNECT_TIME = DOUBLE + 33
CERTINFO = PTR + 34
CONDITION_UNMET = LONG + 35
RTSP_SESSION_ID = STRING + 36
RTSP_CLIENT_CSEQ = LONG + 37
RTSP_SERVER_CSEQ = LONG + 38
RTSP_CSEQ_RECV = LONG + 39
PRIMARY_PORT = LONG + 40
LOCAL_IP = STRING + 41
LOCAL_PORT = LONG + 42
TLS_SESSION = PTR + 43
ACTIVESOCKET = SOCKET + 44
TLS_SSL_PTR = PTR + 45
HTTP_VERSION = LONG + 46
PROXY_SSL_VERIFYRESULT = LONG + 47
PROTOCOL = LONG + 48
SCHEME = STRING + 49
TOTAL_TIME_T = OFF_T + 50
NAMELOOKUP_TIME_T = OFF_T + 51
CONNECT_TIME_T = OFF_T + 52
PRETRANSFER_TIME_T = OFF_T + 53
STARTTRANSFER_TIME_T = OFF_T + 54
REDIRECT_TIME_T = OFF_T + 55
APPCONNECT_TIME_T = OFF_T + 56
RETRY_AFTER = OFF_T + 57
EFFECTIVE_METHOD = STRING + 58
PROXY_ERROR = LONG + 59
REFERER = STRING + 60
CAINFO = STRING + 61
CAPATH = STRING + 62
XFER_ID = OFF_T + 63
CONN_ID = OFF_T + 64
LASTONE = 64
curl_global_init = libcurl.curl_global_init
@@ -325,7 +325,7 @@ class CURLException(NetWorkException):
self.errorcode = code.value
error = curl_easy_strerror(code).decode("utf8")
for _ in dir(CURLcode):
if _.startswith("CURLE_") and code.value == getattr(CURLcode, _):
if _.startswith("") and code.value == getattr(CURLcode, _):
error = str(code.value) + " " + _ + " : " + error
break
else:
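Since the CURLE_ / CURLOPT_ / CURLINFO_ prefixes are stripped from the enum members above, callers now spell the constants as CURLcode.OPERATION_TIMEDOUT, CURLoption.URL, CURLINFO.RESPONSE_CODE, and the exception formatting no longer needs a prefix filter when it maps a result code back to its name. A minimal sketch of that lookup, reusing the CURLcode class from this diff (the helper name is illustrative, not part of the commit):

def _curlcode_name(code_value):
    # e.g. 28 -> "28 OPERATION_TIMEDOUT"; skips dunder and ctypes attributes
    for name in dir(CURLcode):
        if name.startswith("_"):
            continue
        if getattr(CURLcode, name) == code_value:
            return "{} {}".format(code_value, name)
    return str(code_value)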

View File

@@ -1,7 +1,7 @@
from libcurl import *
from .libcurl import *
import threading, functools, queue
from ctypes import c_long, cast, pointer, POINTER, c_char
from network.requests_common import *
from requests import ResponseBase, Timeout, Requester_common
from traceback import print_exc
@@ -45,7 +45,7 @@ def ExceptionFilter(func):
_ = func(*args, **kwargs)
return _
except CURLException as e:
if e.errorcode == CURLcode.CURLE_OPERATION_TIMEDOUT:
if e.errorcode == CURLcode.OPERATION_TIMEDOUT:
raise Timeout(e)
else:
raise e
@@ -53,8 +53,7 @@ def ExceptionFilter(func):
return _wrapper
class Session(Sessionbase):
class Requester(Requester_common):
def raise_for_status(self):
if self.last_error:
raise CURLException(self.last_error)
@@ -62,7 +61,7 @@ class Session(Sessionbase):
def _getStatusCode(self, curl):
status_code = c_long()
self.last_error = curl_easy_getinfo(
curl, CURLINFO.CURLINFO_RESPONSE_CODE, pointer(status_code)
curl, CURLINFO.RESPONSE_CODE, pointer(status_code)
)
self.raise_for_status()
return status_code.value
@@ -70,17 +69,17 @@ class Session(Sessionbase):
def _set_proxy(self, curl, proxy):
if proxy:
self.last_error = curl_easy_setopt(
curl, CURLoption.CURLOPT_PROXY, proxy.encode("utf8")
curl, CURLoption.PROXY, proxy.encode("utf8")
)
self.raise_for_status()
def _set_verify(self, curl, verify):
if verify == False:
curl_easy_setopt(curl, CURLoption.CURLOPT_SSL_VERIFYPEER, 0)
curl_easy_setopt(curl, CURLoption.CURLOPT_SSL_VERIFYHOST, 0)
curl_easy_setopt(curl, CURLoption.SSL_VERIFYPEER, 0)
curl_easy_setopt(curl, CURLoption.SSL_VERIFYHOST, 0)
else:
curl_easy_setopt(curl, CURLoption.CURLOPT_SSL_VERIFYPEER, 1)
curl_easy_setopt(curl, CURLoption.CURLOPT_SSL_VERIFYHOST, 2)
curl_easy_setopt(curl, CURLoption.SSL_VERIFYPEER, 1)
curl_easy_setopt(curl, CURLoption.SSL_VERIFYHOST, 2)
def _perform(self, curl):
self.last_error = curl_easy_perform(curl)
@@ -88,11 +87,39 @@ class Session(Sessionbase):
def _set_allow_redirects(self, curl, allow_redirects):
curl_easy_setopt(curl, CURLoption.CURLOPT_FOLLOWLOCATION, int(allow_redirects))
# curl_easy_setopt(curl, CURLoption.CURLOPT_MAXREDIRS, 100) # the default of 50 is enough
curl_easy_setopt(curl, CURLoption.FOLLOWLOCATION, int(allow_redirects))
# curl_easy_setopt(curl, CURLoption.MAXREDIRS, 100) # the default of 50 is enough
def __WriteMemoryCallback(self, headerqueue, que, contents, size, nmemb, userp):
if headerqueue:
headerqueue.put(0)
realsize = size * nmemb
bs = cast(contents, POINTER(c_char))[:realsize]
if isinstance(que, queue.Queue):
que.put(bs)
elif isinstance(que, list):
que.append(bs)
return realsize
def __getrealheader(self, headerqueue):
if isinstance(headerqueue, queue.Queue):
header = ""
while True:
_headerb = headerqueue.get()
if _headerb == 0:
break
elif _headerb == 1:
self.raise_for_status()
_headerb = _headerb.decode("utf8")
if _headerb.startswith("HTTP/"):
header = ""
header += _headerb
return header
elif isinstance(headerqueue, list):
return b"".join(headerqueue).decode("utf8")
@ExceptionFilter
def request_impl(
def request(
self,
method,
scheme,
@@ -111,84 +138,74 @@ class Session(Sessionbase):
allow_redirects,
):
curl = AutoCURLHandle(curl_easy_init())
curl_easy_setopt(curl, CURLoption.CURLOPT_COOKIEJAR, "")
curl_easy_setopt(curl, CURLoption.CURLOPT_USERAGENT, self.UA.encode("utf8"))
if cookies:
cookies.update(self.cookies)
else:
cookies = self.cookies
curl_easy_setopt(curl, CURLoption.COOKIEJAR, "")
curl_easy_setopt(
curl, CURLoption.USERAGENT, headers["User-Agent"].encode("utf8")
)
if cookies:
cookie = self._parsecookie(cookies)
curl_easy_setopt(curl, CURLoption.CURLOPT_COOKIE, cookie.encode("utf8"))
curl_easy_setopt(curl, CURLoption.COOKIE, cookie.encode("utf8"))
if timeout:
curl_easy_setopt(curl, CURLoption.CURLOPT_TIMEOUT_MS, timeout)
curl_easy_setopt(curl, CURLoption.CURLOPT_CONNECTTIMEOUT_MS, timeout)
curl_easy_setopt(curl, CURLoption.TIMEOUT_MS, timeout)
curl_easy_setopt(curl, CURLoption.CONNECTTIMEOUT_MS, timeout)
curl_easy_setopt(
curl,
CURLoption.CURLOPT_ACCEPT_ENCODING,
CURLoption.ACCEPT_ENCODING,
headers["Accept-Encoding"].encode("utf8"),
)
curl_easy_setopt(
curl, CURLoption.CURLOPT_CUSTOMREQUEST, method.upper().encode("utf8")
)
curl_easy_setopt(curl, CURLoption.CUSTOMREQUEST, method.upper().encode("utf8"))
self.last_error = curl_easy_setopt(
curl, CURLoption.CURLOPT_URL, url.encode("utf8")
)
self.last_error = curl_easy_setopt(curl, CURLoption.URL, url.encode("utf8"))
self.raise_for_status()
curl_easy_setopt(curl, CURLoption.CURLOPT_PORT, port)
curl_easy_setopt(curl, CURLoption.PORT, port)
lheaders = Autoslist()
for _ in self._parseheader(headers, None):
lheaders = curl_slist_append(
cast(lheaders, POINTER(curl_slist)), _.encode("utf8")
)
self.last_error = curl_easy_setopt(
curl, CURLoption.CURLOPT_HTTPHEADER, lheaders
)
self.last_error = curl_easy_setopt(curl, CURLoption.HTTPHEADER, lheaders)
self.raise_for_status()
self._set_verify(curl, verify)
self._set_proxy(curl, proxy)
self._set_allow_redirects(curl, allow_redirects)
if datalen:
curl_easy_setopt(curl, CURLoption.CURLOPT_POSTFIELDS, dataptr)
curl_easy_setopt(curl, CURLoption.CURLOPT_POSTFIELDSIZE, datalen)
curl_easy_setopt(curl, CURLoption.POSTFIELDS, dataptr)
curl_easy_setopt(curl, CURLoption.POSTFIELDSIZE, datalen)
resp = Response()
resp.keeprefs.append(curl)
if stream:
def WriteMemoryCallback(headerqueue, queue, contents, size, nmemb, userp):
if headerqueue:
headerqueue.put(0)
realsize = size * nmemb
queue.put(cast(contents, POINTER(c_char))[:realsize])
return realsize
_content = []
_headers = []
headerqueue = queue.Queue()
keepref1 = WRITEFUNCTION(
functools.partial(WriteMemoryCallback, headerqueue, resp.queue)
)
keepref2 = WRITEFUNCTION(
functools.partial(WriteMemoryCallback, None, headerqueue)
)
curl_easy_setopt(
curl,
CURLoption.CURLOPT_WRITEFUNCTION,
cast(keepref1, c_void_p).value,
)
_notif = headerqueue
else:
headerqueue = []
_notif = None
resp.queue = []
keepref1 = WRITEFUNCTION(
functools.partial(self.__WriteMemoryCallback, _notif, resp.queue)
)
keepref2 = WRITEFUNCTION(
functools.partial(self.__WriteMemoryCallback, None, headerqueue)
)
curl_easy_setopt(
curl,
CURLoption.WRITEFUNCTION,
cast(keepref1, c_void_p).value,
)
curl_easy_setopt(
curl,
CURLoption.CURLOPT_HEADERFUNCTION,
cast(keepref2, c_void_p).value,
)
resp.keeprefs += [keepref1, keepref2]
curl_easy_setopt(
curl,
CURLoption.HEADERFUNCTION,
cast(keepref2, c_void_p).value,
)
resp.keeprefs += [keepref1, keepref2]
if stream:
def ___perform():
error = False
@@ -205,50 +222,15 @@ class Session(Sessionbase):
threading.Thread(target=___perform, daemon=True).start()
headerb = ""
while True:
_headerb = headerqueue.get()
if _headerb == 0:
break
elif _headerb == 1:
self.raise_for_status()
_headerb = _headerb.decode("utf8")
if _headerb.startswith("HTTP/"):
headerb = ""
headerb += _headerb
resp.headers = self._update_header_cookie(headerb)
resp.status_code = self._getStatusCode(curl)
else:
def WriteMemoryCallback(saver, contents, size, nmemb, userp):
realsize = size * nmemb
saver.append(cast(contents, POINTER(c_char))[:realsize])
return realsize
_content = []
_headers = []
keepref1 = WRITEFUNCTION(functools.partial(WriteMemoryCallback, _content))
keepref2 = WRITEFUNCTION(functools.partial(WriteMemoryCallback, _headers))
curl_easy_setopt(
curl,
CURLoption.CURLOPT_WRITEFUNCTION,
cast(keepref1, c_void_p).value,
)
curl_easy_setopt(
curl,
CURLoption.CURLOPT_HEADERFUNCTION,
cast(keepref2, c_void_p).value,
)
resp.keeprefs += [keepref1, keepref2]
self._perform(curl)
resp.content = b"".join(_content)
resp.headers = self._update_header_cookie(b"".join(_headers).decode("utf8"))
resp.status_code = self._getStatusCode(curl)
header = self.__getrealheader(headerqueue)
if not stream:
resp.content = b"".join(resp.queue)
resp.headers, resp.cookies = self._parseheader2dict(header)
resp.status_code = self._getStatusCode(curl)
resp.last_error = self.last_error
resp.cookies = self.cookies
return resp
Sessionimpl[0] = Session
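Because ExceptionFilter above converts CURLcode.OPERATION_TIMEDOUT into the shared Timeout exception, callers can handle timeouts the same way on either backend. A hedged usage sketch, assuming the project's own requests module shown later in this commit (timeout is forwarded to CURLOPT TIMEOUT_MS on the libcurl path, so it is interpreted in milliseconds there):

from requests import Session, Timeout  # the project's requests module, not PyPI requests

s = Session()
try:
    resp = s.get("https://example.com", timeout=3000)
    print(resp.status_code)
except Timeout:
    print("request timed out")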

View File

@@ -1,5 +1,5 @@
from ctypes import c_void_p, cast, c_size_t, pointer, create_string_buffer, POINTER
from libcurl import *
from .libcurl import *
from urllib.parse import urlsplit
import time
@@ -26,7 +26,7 @@ class WebSocket:
error = curl_ws_recv(
self.curl, buffer, (10240), pointer(rlen), pointer(meta)
)
if error.value == CURLcode.CURLE_AGAIN:
if error.value == CURLcode.AGAIN:
time.sleep(0.01)
elif error:
raise CURLException(error)
@@ -57,7 +57,7 @@ class WebSocket:
if http_proxy_host is None or http_proxy_port is None:
return
proxy = "{}:{}".format(http_proxy_host, http_proxy_port)
curl_easy_setopt(curl, CURLoption.CURLOPT_PROXY, proxy.encode("utf8"))
curl_easy_setopt(curl, CURLoption.PROXY, proxy.encode("utf8"))
def _parseurl2serverandpath(self, url):
url = url.strip()
@@ -86,19 +86,19 @@ class WebSocket:
def _set_verify(self, curl, verify):
if verify == False:
curl_easy_setopt(curl, CURLoption.CURLOPT_SSL_VERIFYPEER, 0)
curl_easy_setopt(curl, CURLoption.CURLOPT_SSL_VERIFYHOST, 0)
curl_easy_setopt(curl, CURLoption.SSL_VERIFYPEER, 0)
curl_easy_setopt(curl, CURLoption.SSL_VERIFYHOST, 0)
else:
curl_easy_setopt(curl, CURLoption.CURLOPT_SSL_VERIFYPEER, 1)
curl_easy_setopt(curl, CURLoption.CURLOPT_SSL_VERIFYHOST, 2)
curl_easy_setopt(curl, CURLoption.SSL_VERIFYPEER, 1)
curl_easy_setopt(curl, CURLoption.SSL_VERIFYHOST, 2)
def connect(self, url, header=None, http_proxy_host=None, http_proxy_port=None):
https, server, port, path = self._parseurl2serverandpath(url)
self.curl = AutoCURLHandle(curl_easy_init())
curl_easy_setopt(self.curl, CURLoption.CURLOPT_URL, url.encode("utf8"))
curl_easy_setopt(self.curl, CURLoption.URL, url.encode("utf8"))
curl_easy_setopt(self.curl, CURLoption.CURLOPT_CONNECT_ONLY, 2)
curl_easy_setopt(self.curl, CURLoption.CURLOPT_PORT, port)
curl_easy_setopt(self.curl, CURLoption.CONNECT_ONLY, 2)
curl_easy_setopt(self.curl, CURLoption.PORT, port)
self._setproxy(self.curl, http_proxy_host, http_proxy_port)
self._set_verify(self.curl, False)
lheaders = Autoslist()
@@ -107,14 +107,8 @@ class WebSocket:
lheaders = curl_slist_append(
cast(lheaders, POINTER(curl_slist)), _.encode("utf8")
)
curl_easy_setopt(self.curl, CURLoption.CURLOPT_HTTPHEADER, lheaders)
curl_easy_setopt(self.curl, CURLoption.HTTPHEADER, lheaders)
error = curl_easy_perform(self.curl)
if error:
raise CURLException(error)
def create_connection(url, **x):
_ = WebSocket()
_.connect(url)
return _

View File

@@ -1,12 +1,14 @@
from winhttp import *
from network.requests_common import *
from .winhttp import *
from requests import ResponseBase, Timeout, Requester_common
from traceback import print_exc
import gzip, zlib
from ctypes import pointer, create_string_buffer, create_unicode_buffer
try:
from brotli_dec import decompress
from .brotli_dec import decompress
except:
pass
from traceback import print_exc
print_exc()
class Response(ResponseBase):
@@ -58,20 +60,7 @@ def ExceptionFilter(func):
return _wrapper
class Session(Sessionbase):
def __init__(self) -> None:
super().__init__()
self.hSession = AutoWinHttpHandle(
WinHttpOpen(
self.UA,
WINHTTP_ACCESS_TYPE_DEFAULT_PROXY,
WINHTTP_NO_PROXY_NAME,
WINHTTP_NO_PROXY_BYPASS,
0,
)
)
if self.hSession == 0:
raise WinhttpException(GetLastError())
class Requester(Requester_common):
def _getheaders(self, hreq):
dwSize = DWORD()
@@ -138,7 +127,7 @@ class Session(Sessionbase):
)
@ExceptionFilter
def request_impl(
def request(
self,
method,
scheme,
@@ -146,7 +135,7 @@ class Session(Sessionbase):
port,
param,
url,
headers,
_headers,
cookies,
dataptr,
datalen,
@@ -156,12 +145,22 @@ class Session(Sessionbase):
timeout,
allow_redirects,
):
headers = self._parseheader(headers, cookies)
headers = self._parseheader(_headers, cookies)
flag = WINHTTP_FLAG_SECURE if scheme == "https" else 0
# print(server,port,param,dataptr)
headers = "\r\n".join(headers)
hConnect = AutoWinHttpHandle(WinHttpConnect(self.hSession, server, port, 0))
hSession = AutoWinHttpHandle(
WinHttpOpen(
_headers["User-Agent"],
WINHTTP_ACCESS_TYPE_DEFAULT_PROXY,
WINHTTP_NO_PROXY_NAME,
WINHTTP_NO_PROXY_BYPASS,
0,
)
)
if hSession == 0:
raise WinhttpException(GetLastError())
hConnect = AutoWinHttpHandle(WinHttpConnect(hSession, server, port, 0))
if hConnect == 0:
raise WinhttpException(GetLastError())
hRequest = AutoWinHttpHandle(
@@ -191,16 +190,14 @@ class Session(Sessionbase):
succ = WinHttpReceiveResponse(hRequest, None)
if succ == 0:
raise WinhttpException(GetLastError())
headers = self._update_header_cookie(self._getheaders(hRequest))
resp = Response()
resp.headers, resp.cookies = self._parseheader2dict(self._getheaders(hRequest))
resp.status_code = self._getStatusCode(hRequest)
resp.headers = headers
resp.cookies = self.cookies
if stream:
resp.hSession = hSession
resp.hconn = hConnect
resp.hreq = hRequest
resp.keepref = self
return resp
availableSize = DWORD()
downloadedSize = DWORD()
@@ -219,7 +216,7 @@ class Session(Sessionbase):
raise WinhttpException(GetLastError())
downloadeddata += buff[: downloadedSize.value]
resp.content = self.decompress(downloadeddata, headers)
resp.content = self.decompress(downloadeddata, resp.headers)
return resp
@@ -237,7 +234,5 @@ class Session(Sessionbase):
data = decompress(data)
return data
except:
print_exc()
raise Exception("unenable to decompress {}".format(encode))
Sessionimpl[0] = Session

View File

@@ -1,4 +1,4 @@
from winhttp import *
from .winhttp import *
from urllib.parse import urlsplit
from ctypes import pointer, create_string_buffer
@@ -156,9 +156,3 @@ class WebSocket:
if self.hWebSocketHandle == 0:
raise WinhttpException(GetLastError())
def create_connection(url, **x):
_ = WebSocket()
_.connect(url)
return _

View File

@@ -1,6 +1,6 @@
from ctypes import windll, POINTER, pointer, Structure, sizeof
from ctypes.wintypes import LPCWSTR, DWORD, LPVOID, WORD, BOOL, LPCVOID, LPWSTR, USHORT
from network.requests_common import NetWorkException
from requests import NetWorkException
class WinhttpException(NetWorkException):

View File

@@ -3,6 +3,7 @@ from collections.abc import Mapping, MutableMapping
from collections import OrderedDict
from urllib.parse import urlencode, urlsplit
from functools import partial
from myutils.config import globalconfig
class NetWorkException(Exception):
@@ -59,11 +60,10 @@ class CaseInsensitiveDict(MutableMapping):
class ResponseBase:
def __init__(self):
self.headers = CaseInsensitiveDict()
self.cookies = {}
self.status_code = 0
self.content = b"{}"
headers = CaseInsensitiveDict()
cookies = {}
status_code = 0
content = b""
@property
def text(self):
@@ -118,9 +118,49 @@ class ResponseBase:
yield pending
class Sessionbase:
class Requester_common:
def request(self, *argc) -> ResponseBase: ...
def _parseheader(self, headers, cookies):
_x = []
if cookies:
cookie = self._parsecookie(cookies)
headers.update({"Cookie": cookie})
for k in sorted(headers.keys()):
_x.append("{}: {}".format(k, headers[k]))
return _x
def _parsecookie(self, cookie):
_c = []
for k, v in cookie.items():
_c.append("{}={}".format(k, v))
return "; ".join(_c)
def _parseheader2dict(self, headerstr):
# print(headerstr)
header = CaseInsensitiveDict()
cookie = {}
for line in headerstr.split("\r\n")[1:]:
idx = line.find(": ")
if idx == -1:
continue
if line[:idx].lower() == "set-cookie":
_c = line[idx + 2 :].split("; ")[0]
_idx = _c.find("=")
cookie[_c[:_idx]] = _c[_idx + 1 :]
else:
header[line[:idx]] = line[idx + 2 :]
return CaseInsensitiveDict(header), cookie
class Session:
def __init__(self) -> None:
self.requester = None
self.requester_type = None
self.UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
self.last_error = 0
self.cookies = {}
self.headers = CaseInsensitiveDict(
@@ -211,45 +251,12 @@ class Sessionbase:
url = scheme + "://" + server + path
return scheme, server, port, path, url
def _parseheader(self, headers, cookies):
_x = []
if cookies:
cookie = self._parsecookie(cookies)
headers.update({"Cookie": cookie})
for k in sorted(headers.keys()):
_x.append("{}: {}".format(k, headers[k]))
return _x
def _parsecookie(self, cookie):
_c = []
for k, v in cookie.items():
_c.append("{}={}".format(k, v))
return "; ".join(_c)
def _update_header_cookie(self, headerstr):
headers, cookies = self._parseheader2dict(headerstr)
self.cookies.update(cookies)
return headers
def _parseheader2dict(self, headerstr):
# print(headerstr)
header = CaseInsensitiveDict()
cookie = {}
for line in headerstr.split("\r\n")[1:]:
idx = line.find(": ")
if idx == -1:
continue
if line[:idx].lower() == "set-cookie":
_c = line[idx + 2 :].split("; ")[0]
_idx = _c.find("=")
cookie[_c[:_idx]] = _c[_idx + 1 :]
else:
header[line[:idx]] = line[idx + 2 :]
return CaseInsensitiveDict(header), cookie
def request_impl(self, *args):
pass
def loadrequester(self) -> Requester_common:
if globalconfig["network"] == 1:
from network.libcurl.requester import Requester
elif globalconfig["network"] == 0:
from network.winhttp.requester import Requester
return Requester()
def request(
self,
@@ -298,7 +305,9 @@ class Sessionbase:
except:
print("Error invalid timeout", timeout)
timeout = None
_ = self.request_impl(
if cookies:
self.cookies.update(cookies)
response = self.loadrequester().request(
method,
scheme,
server,
@@ -306,7 +315,7 @@ class Sessionbase:
param,
url,
headers,
cookies,
self.cookies,
dataptr,
datalen,
proxy,
@@ -315,8 +324,9 @@ class Sessionbase:
timeout,
allow_redirects,
)
return _
self.cookies.update(response.cookies)
response.cookies.update(self.cookies)
return response
def get(self, url, **kwargs):
return self.request("GET", url, **kwargs)
@@ -334,16 +344,13 @@ class Sessionbase:
return self.request("DELETE", url, **kwargs)
Sessionimpl = [Sessionbase]
def request(method, url, **kwargs):
with Sessionimpl[0]() as session:
with Session() as session:
return session.request(method=method, url=url, **kwargs)
def session():
with Sessionimpl[0]() as session:
with Session() as session:
return session
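The module-level helpers above now construct the unified Session directly instead of going through the Sessionimpl indirection. A short usage sketch under that assumption:

import requests  # the project's requests module shown above, not the PyPI package

resp = requests.request("GET", "https://example.com")
print(resp.status_code)

s = requests.session()  # reusable session keeps cookies across calls
s.get("https://example.com")
print(s.cookies)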

View File

@@ -2,7 +2,7 @@ from translator.basetranslator_dev import basetransdev
class TS(basetransdev):
target_url = "https://fanyi.baidu.com/mtpe-individual/multimodal#/"
target_url = "https://fanyi.baidu.com/mtpe-individual/multimodal"
def langmap(self):
return {
@@ -20,7 +20,7 @@ class TS(basetransdev):
"""document.querySelector("#editor-text > div.AZLVLJHb > div.Ssl84aLh > span").click()"""
)
self.Runtime_evaluate(
"""document.querySelector("#editor-text > div.AZLVLJHb > div.Ssl84aLh > div > div > div").click()"""
"""document.querySelector("#editor-text > div.AZLVLJHb > div.Ssl84aLh > div.NNh5PamB.GEptIbSX > div > div").click()"""
)
self.send_keys(content)
return self.wait_for_result(

View File

@@ -0,0 +1,12 @@
from myutils.config import globalconfig
def create_connection(url, header=None, http_proxy_host=None, http_proxy_port=None):
if globalconfig["network"] == 1:
from network.libcurl.websocket import WebSocket
elif globalconfig["network"] == 0:
from network.winhttp.websocket import WebSocket
_ = WebSocket()
_.connect(url, header, http_proxy_host, http_proxy_port)
return _
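This new module centralizes WebSocket creation the same way loadrequester does for HTTP: the backend class is imported lazily based on globalconfig["network"], and connect() now receives the header and proxy arguments instead of only the URL. A hedged usage sketch; the new file's import path is not visible in this diff, so the module name below is an assumption:

# module path below is assumed; the new file's name is not shown in the diff
from network.websocket import create_connection

ws = create_connection(
    "wss://example.com/socket",
    header=["Origin: https://example.com"],
    http_proxy_host="127.0.0.1",
    http_proxy_port=10809,
)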

View File

@@ -28,8 +28,8 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR}/version)
include(generate_product_version)
set(VERSION_MAJOR 5)
set(VERSION_MINOR 7)
set(VERSION_PATCH 2)
set(VERSION_MINOR 8)
set(VERSION_PATCH 0)
add_library(pch pch.cpp)
target_precompile_headers(pch PUBLIC pch.h)

View File

@@ -38,7 +38,7 @@
#include <mshtml.h>
#include <stdlib.h>
#include <iostream>
#include <map>
#include <winbase.h>
#include <wincon.h>

View File

@@ -57,34 +57,25 @@ int wmain(int argc, wchar_t *argv[])
if (checkisapatch())
return 1;
auto argv0 = std::wstring(argv[1]);
if (argv0 == L"dllinject")
return dllinjectwmain(argc - 1, argv + 1);
if (argv0 == L"ntleas")
return ntleaswmain(argc - 1, argv + 1);
if (argv0 == L"listpm")
return listprocessmodule(argc - 1, argv + 1);
if (argv0 == L"update")
return updatewmain(argc - 1, argv + 1);
typedef int (*wmaint)(int, wchar_t **);
std::map<std::wstring, wmaint> fm = {
{L"dllinject", dllinjectwmain},
{L"ntleas", ntleaswmain},
{L"listpm", listprocessmodule},
{L"update", updatewmain},
#ifndef _WIN64
else if (argv0 == L"mainmp3")
return mainmp3(argc - 1, argv + 1);
else if (argv0 == L"LR")
return LRwmain(argc - 1, argv + 1);
else if (argv0 == L"le")
return lewmain(argc - 1, argv + 1);
else if (argv0 == L"jbj7")
return jbjwmain(argc - 1, argv + 1);
else if (argv0 == L"dreye")
return dreyewmain(argc - 1, argv + 1);
else if (argv0 == L"kingsoft")
return kingsoftwmain(argc - 1, argv + 1);
else if (argv0 == L"voiceroid2")
return voiceroid2wmain(argc - 1, argv + 1);
else if (argv0 == L"neospeech")
return neospeech(argc - 1, argv + 1);
else if (argv0 == L"neospeechlist")
return neospeechlist(argc - 1, argv + 1);
{L"mainmp3", mainmp3},
{L"LR", LRwmain},
{L"le", lewmain},
{L"jbj7", jbjwmain},
{L"dreye", dreyewmain},
{L"kingsoft", kingsoftwmain},
{L"voiceroid2", voiceroid2wmain},
{L"neospeech", neospeech},
{L"neospeechlist", neospeechlist},
#else
#endif // !_WIN64
};
return fm[argv0](argc - 1, argv + 1);
}