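"""EasyChat provider for https://chat3.eqing.tech.

The site exposes an OpenAI-compatible chat endpoint that additionally requires
a captchaToken and a guest id. This provider obtains both from a cached auth
file, from a shared g4f instance (G4F_SHARE_URL), or by driving a real browser
with nodriver, and then streams completions through OpenaiTemplate.

Minimal usage sketch (assumes the standard g4f client API; adjust to the
installed version):

    from g4f.client import Client
    from g4f.Provider import EasyChat

    client = Client(provider=EasyChat)
    response = client.chat.completions.create(
        model=EasyChat.default_model,
        messages=[{"role": "user", "content": "Hello"}],
    )
    print(response.choices[0].message.content)
"""
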
from __future__ import annotations
import os
import asyncio
import requests
import json

try:
    import nodriver
except ImportError:
    # nodriver is optional; it is only needed when the captchaToken has to be
    # captured from a live browser session via get_args_from_nodriver.
    pass

from ..typing import AsyncResult, Messages
from ..config import DEFAULT_MODEL
from ..requests import get_args_from_nodriver, raise_for_status
from ..providers.base_provider import AuthFileMixin
from .template import OpenaiTemplate
from .helper import get_last_user_message
from .. import debug


class EasyChat(OpenaiTemplate, AuthFileMixin):
    url = "https://chat3.eqing.tech"
    api_base = f"{url}/api/openai/v1"
    api_endpoint = f"{api_base}/chat/completions"

    working = False
    active_by_default = True
    use_model_names = True

    default_model = DEFAULT_MODEL.split("/")[-1]
    model_aliases = {
        DEFAULT_MODEL: f"{default_model}-free",
    }

    # Mutable class-level state shared between calls
    captchaToken: str = None
    share_url: str = None
    looked: bool = False
    guestId: str = None
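
    # List only the upstream "-free" model variants, exposed under their base
    # names; model_aliases maps each base name back to its "-free" form.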
    @classmethod
    def get_models(cls, **kwargs) -> list[str]:
        if not cls.models:
            models = super().get_models(**kwargs)
            models = {m.replace("-free", ""): m for m in models if m.endswith("-free")}
            cls.model_aliases.update(models)
            cls.models = list(models)
        return cls.models
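
    # Token acquisition order: cached auth file -> shared g4f instance
    # (G4F_SHARE_URL) -> local nodriver browser session.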
    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        stream: bool = True,
        proxy: str = None,
        extra_body: dict = None,
        **kwargs
    ) -> AsyncResult:
        cls.share_url = os.getenv("G4F_SHARE_URL")
        model = cls.get_model(model.replace("-free", ""))
        args = None
        cache_file = cls.get_cache_file()
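
        # Runs inside the nodriver browser session: watch outgoing requests to
        # the chat endpoint and lift the captchaToken out of the POST body,
        # clicking through the consent dialog and sending a throwaway message
        # if the page has not issued a request yet.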
        async def callback(page):
            cls.captchaToken = None

            def on_request(event: nodriver.cdp.network.RequestWillBeSent, page=None):
                if event.request.url != cls.api_endpoint:
                    return
                if not event.request.post_data:
                    return
                cls.captchaToken = json.loads(event.request.post_data).get("captchaToken")

            await page.send(nodriver.cdp.network.enable())
            page.add_handler(nodriver.cdp.network.RequestWillBeSent, on_request)

            # Dismiss the consent dialog ("我已知晓" means "I understand").
            button = await page.find("我已知晓")
            if button:
                await button.click()
            else:
                debug.error("No 'Agree' button found.")

            for _ in range(3):
                await asyncio.sleep(1)
                # Wait for the "Verifying..." captcha modal to disappear.
                for _ in range(300):
                    modal = await page.find("Verifying...")
                    if not modal:
                        break
                    debug.log("EasyChat: Waiting for captcha verification...")
                    await asyncio.sleep(1)
                if cls.captchaToken:
                    debug.log("EasyChat: Captcha token found, proceeding.")
                    break
                # No token captured yet: type a throwaway prompt and send it so
                # the page issues a request that carries a captchaToken.
                textarea = await page.select("[contenteditable=\"true\"]", 180)
                if textarea is not None:
                    await textarea.send_keys("Hello")
                await asyncio.sleep(1)
                button = await page.select("button[class*='chat_chat-input-send']")
                if button:
                    await button.click()
                for _ in range(300):
                    await asyncio.sleep(1)
                    if cls.captchaToken:
                        break

            cls.guestId = await page.evaluate('"" + JSON.parse(localStorage.getItem("user-info") || "{}")?.state?.guestId')
            await asyncio.sleep(3)
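
        # Reuse a cached captchaToken / guestId from the auth file when
        # available, so a browser session is not started on every call.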
        if cache_file.exists():
            with cache_file.open("r") as f:
                args = json.load(f)
            cls.captchaToken = args.pop("captchaToken")
            cls.guestId = args.pop("guestId", None)

        if cls.captchaToken:
            debug.log("EasyChat: Using cached captchaToken.")
        elif not cls.looked and cls.share_url:
            # No usable token: ask a shared g4f instance (G4F_SHARE_URL) to
            # answer the prompt and to pass back its connection args for caching.
            cls.looked = True
            try:
                debug.log("No cache file found, trying to fetch from share URL.")
                response = requests.get(cls.share_url, params={
                    "prompt": get_last_user_message(messages),
                    "model": model,
                    "provider": cls.__name__
                })
                raise_for_status(response)
                text, *sub = response.text.split("\n" * 10 + "<!--", 1)
                if sub:
                    debug.log("Save args to cache file:", str(cache_file))
                    with cache_file.open("w") as f:
                        f.write(sub[0].strip())
                yield text
            finally:
                cls.looked = False
            return
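
        # At most two attempts: if the server rejects the captcha token, drop
        # the cached args and retry with a fresh browser session.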
        for _ in range(2):
            if not args:
                args = await get_args_from_nodriver(cls.url, proxy=proxy, callback=callback, user_data_dir=None)
            if extra_body is None:
                extra_body = {}
            extra_body.setdefault("captchaToken", cls.captchaToken)
            try:
                last_chunk = None
                async for chunk in super().create_async_generator(
                    model=model,
                    messages=messages,
                    stream=True,
                    extra_body=extra_body,
                    **{
                        **args,
                        "headers": {
                            "X-Guest-Id": cls.guestId,
                            **args.get("headers", {})
                        }
                    },
                    **kwargs
                ):
                    # Two consecutive newline chunks precede the trailing
                    # "provided by" footer; stop streaming before it is emitted.
                    if last_chunk == "\n" and chunk == "\n":
                        break
                    last_chunk = chunk
                    yield chunk
            except Exception as e:
                if "CLEAR-CAPTCHA-TOKEN" in str(e):
                    debug.log("EasyChat: Captcha token expired, clearing cache file.")
                    cache_file.unlink(missing_ok=True)
                    # Drop the rejected token so the refreshed one from the next
                    # nodriver run is picked up by setdefault above.
                    extra_body.pop("captchaToken", None)
                    args = None
                    continue
                raise e
            break

        if not args:
            raise ValueError("Failed to retrieve arguments for EasyChat.")
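
        # When this instance serves a share URL (G4F_SHARE_AUTH is set), append
        # the captured args behind the "\n" * 10 + "<!--" marker so the client
        # can cache them; persist them to the local auth file as well.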
if os.getenv("G4F_SHARE_AUTH"):
yield "\n" * 10
yield "<!--"
yield json.dumps({**args, "captchaToken": cls.captchaToken, "guestId": cls.guestId})
with cache_file.open("w") as f:
json.dump({**args, "captchaToken": cls.captchaToken, "guestId": cls.guestId}, f) |