处理了新增的签名问题。截止 20251007 有效
加上轮询,上下文和转 OpenAI 逻辑就是一个成熟的 2api 项目
对于带历史对话的,参数 t 是最近一次 user content
# Note from the original post (translated): the token is the "token" cookie
# value from the chat.z.ai web page; so far it appears to be long-lived.
import time, hmac, hashlib, requests, uuid, json, base64

token = ""


def decode_jwt_payload(token):
    """Decode a JWT's payload segment without verifying its signature.

    Parameters
    ----------
    token : str
        A JWT of the form "header.payload.signature".

    Returns
    -------
    dict
        The JSON-decoded payload claims.
    """
    parts = token.split('.')
    payload = parts[1]

    # base64url encoders commonly strip trailing '=' padding; restore it,
    # otherwise urlsafe_b64decode raises on mis-sized input.
    padding = 4 - len(payload) % 4
    if padding != 4:
        payload += '=' * padding

    decoded = base64.urlsafe_b64decode(payload)
    return json.loads(decoded)


def zs(e, t, timestamp):
    """Compute the X-Signature value the chat.z.ai frontend attaches.

    Two-stage HMAC-SHA256: a time-bucketed intermediate key is derived from
    the constant "junjie", then used to sign the string "{e}|{t}|{timestamp}".

    Parameters
    ----------
    e : str
        Comma-joined request metadata (requestId, timestamp, user_id).
    t : str
        The most recent user message content.
    timestamp : int
        Milliseconds since the Unix epoch.

    Returns
    -------
    dict
        {"signature": <hex digest>, "timestamp": <echoed timestamp>}.
    """
    r = str(timestamp)
    i = f"{e}|{t}|{r}"
    # The derived key rotates every 5 minutes (timestamp is in milliseconds).
    n = timestamp // (5 * 60 * 1000)
    key = "junjie".encode('utf-8')
    o = hmac.new(key, str(n).encode('utf-8'), hashlib.sha256).hexdigest()
    signature = hmac.new(o.encode('utf-8'), i.encode('utf-8'), hashlib.sha256).hexdigest()

    return {
        "signature": signature,
        "timestamp": timestamp
    }


def make_request():
    """Prompt for one message, sign the request, and stream the reply to stdout."""
    # Renamed from `payload` so the JWT claims are not shadowed by the
    # request body dict further down.
    claims = decode_jwt_payload(token)
    user_id = claims['id']
    chat_id = str(uuid.uuid4())
    timestamp = int(time.time() * 1000)
    request_id = str(uuid.uuid4())

    t = input("Hello, how can I help you ?\n - ")

    e = f"requestId,{request_id},timestamp,{timestamp},user_id,{user_id}"

    result = zs(e, t, timestamp)
    signature = result["signature"]

    url = "https://chat.z.ai/api/chat/completions"
    params = {
        "timestamp": timestamp,
        "requestId": request_id,
        "user_id": user_id,
        "token": token,
        "current_url": f"https://chat.z.ai/c/{chat_id}",
        "pathname": f"/c/{chat_id}",
        "signature_timestamp": timestamp
    }

    headers = {
        "Authorization": f"Bearer {token}",
        "X-FE-Version": "prod-fe-1.0.95",
        "X-Signature": signature
    }

    body = {
        "stream": True,
        "model": "GLM-4-6-API-V1",
        "messages": [
            {"role": "user", "content": t}
        ],
        "params": {},
        "features": {
            "image_generation": False,
            "web_search": False,
            "auto_web_search": False,
            "preview_mode": True,
        },
        "enable_thinking": True,
        "chat_id": chat_id,
        "id": str(uuid.uuid4())
    }

    response = requests.post(url, params=params, headers=headers, json=body, stream=True)
    response.raise_for_status()

    # Stream the server-sent response to stdout as it arrives.
    for chunk in response.iter_content(chunk_size=8192):
        if chunk:
            print(chunk.decode('utf-8'), end='')


if __name__ == "__main__":
    make_request()
