mirror of https://github.com/thegeneralist01/twitter-openapi
synced 2026-01-11 23:50:26 +01:00

update test

Signed-off-by: ふぁ <yuki@yuki0311.com>
parent cee095e948
commit bbffeb11ac

3 changed files with 62 additions and 14 deletions
.github/workflows/test-python.yaml (vendored, 3 changes)

@@ -92,3 +92,6 @@ jobs:
           python test/python/test_serialize.py
         env:
           TWITTER_SESSION: ${{ secrets.TWITTER_SESSION }}
+          ERROR_UNCATCHED: "False"
+          SLEEP_TIME: 2
+          CUESOR_TEST_COUNT: 10
test/python/test_serialize.py

@@ -3,13 +3,18 @@ import os
 import logging
 import base64
 import openapi_client as pt
+from pydantic import BaseModel
 from pathlib import Path
+import time


 logging.basicConfig(level=logging.DEBUG, format="%(asctime)s %(levelname)s %(message)s")
 logger = logging.getLogger("test_serialize")

 ERROR_UNCATCHED = os.environ.get("ERROR_UNCATCHED", "false").lower() == "true"
+SLEEP_TIME = float(os.environ.get("SLEEP", "0"))
+CUESOR_TEST_COUNT = int(os.environ.get("CUESOR_TEST_COUNT", "3"))


 if Path("cookie.json").exists():
     with open("cookie.json", "r") as f:
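For reference, a quick standalone check (not from the repository) of how these lookups resolve against the values the workflow step exports. Note that the script appears to read SLEEP rather than SLEEP_TIME, so the 2-second delay configured in the workflow would fall back to the 0 default unless the variable names are aligned elsewhere.

import os

# Illustrative environment, mirroring what the workflow exports
# (this setup block is hypothetical, for demonstration only).
os.environ.update({
    "ERROR_UNCATCHED": "False",
    "SLEEP_TIME": "2",
    "CUESOR_TEST_COUNT": "10",
})

ERROR_UNCATCHED = os.environ.get("ERROR_UNCATCHED", "false").lower() == "true"
SLEEP_TIME = float(os.environ.get("SLEEP", "0"))           # reads SLEEP, not SLEEP_TIME
CUESOR_TEST_COUNT = int(os.environ.get("CUESOR_TEST_COUNT", "3"))

print(ERROR_UNCATCHED, SLEEP_TIME, CUESOR_TEST_COUNT)      # False 0.0 10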
@@ -25,11 +30,36 @@ with open("src/config/placeholder.json", "r") as f:
     placeholder = json.load(f)


-def getKey(snake_str):
+def get_key(snake_str):
     components = snake_str.split("_")
     return "".join(x.title() for x in components[1:])


+def get_cursor(obj):
+    res = []
+    if type(obj) == dict:
+        if obj.get("__typename") is pt.TypeName.TIMELINETIMELINECURSOR:
+            res.append(obj["value"])
+        else:
+            for v in obj.values():
+                res.extend(get_cursor(v))
+    elif type(obj) == list:
+        for v in obj:
+            res.extend(get_cursor(v))
+    return res
+
+
+def get_kwargs(key, additional):
+    kwargs = {"path_query_id": placeholder[key]["queryId"]}
+    if placeholder[key].get("variables") is not None:
+        kwargs["variables"] = json.dumps(placeholder[key]["variables"] | additional)
+    if placeholder[key].get("features") is not None:
+        kwargs["features"] = json.dumps(placeholder[key]["features"])
+    if placeholder[key].get("fieldToggles") is not None:
+        kwargs["field_toggles"] = json.dumps(placeholder[key]["fieldToggles"])
+    return kwargs
+
+
 api_conf = pt.Configuration(
     api_key={
         "ClientLanguage": "en",
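For illustration only, the same recursive walk run against a hand-made response fragment. The sample dict below is hypothetical, and the typename is compared as a plain string here, whereas the script above checks it against pt.TypeName.TIMELINETIMELINECURSOR.

def get_cursor(obj):
    # Depth-first walk over a serialized response, collecting every
    # TimelineTimelineCursor "value" it encounters.
    res = []
    if isinstance(obj, dict):
        if obj.get("__typename") == "TimelineTimelineCursor":  # assumed string typename
            res.append(obj["value"])
        else:
            for v in obj.values():
                res.extend(get_cursor(v))
    elif isinstance(obj, list):
        for v in obj:
            res.extend(get_cursor(v))
    return res

# Hand-made fragment, loosely shaped like a timeline response (illustrative only).
sample = {
    "data": {
        "entries": [
            {"__typename": "TimelineTimelineItem", "content": {"text": "hello"}},
            {"__typename": "TimelineTimelineCursor", "value": "cursor-bottom-123"},
        ]
    }
}
print(get_cursor(sample))  # ['cursor-bottom-123']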
@@ -53,22 +83,28 @@ for x in [pt.DefaultApi, pt.TweetApi, pt.UserApi, pt.UserListApi]:
         if props.startswith("__") or props.endswith("_with_http_info"):
             continue

-        key = getKey(props)
-        logger.info(f"Try: {key}")
-        kwargs = {"path_query_id": placeholder[key]["queryId"]}
-        if placeholder[key].get("variables") is not None:
-            kwargs["variables"] = json.dumps(placeholder[key]["variables"])
-        if placeholder[key].get("features") is not None:
-            kwargs["features"] = json.dumps(placeholder[key]["features"])
-        if placeholder[key].get("fieldToggles") is not None:
-            kwargs["field_toggles"] = json.dumps(placeholder[key]["fieldToggles"])
+        key = get_key(props)
+        cursor_list = set([None])
+        cursor_history = set()

         try:
-            res = getattr(x(api_client), props)(**kwargs)
+            for _ in range(CUESOR_TEST_COUNT):
+                cursor = cursor_list.pop()
+                cursor_history.add(cursor)
+                logger.info(f"Try: {key} {cursor}")
+
+                kwargs = get_kwargs(key, {} if cursor is None else {"cursor": cursor})
+                res: BaseModel = getattr(x(api_client), props)(**kwargs)
+
+                cursor_list.update(set(get_cursor(res.to_dict())) - cursor_history)
+
+                if len(cursor_list) == 0:
+                    break
+                time.sleep(SLEEP_TIME)
         except Exception as e:
             if ERROR_UNCATCHED:
-                raise e
+                raise
             import traceback

             logger.error("==========[STACK TRACE]==========")
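The loop above is essentially a small cursor crawl: seed the frontier with None, fetch a page, harvest any cursors that have not been requested yet, and stop once the frontier is empty or CUESOR_TEST_COUNT pages have been pulled. A self-contained sketch of that pattern, with a stubbed fetch_page standing in for the generated API call (all names below are hypothetical):

import time

# Hypothetical paginated source: each cursor maps to the cursors its page exposes.
PAGES = {None: ["c1", "c2"], "c1": ["c3"], "c2": [], "c3": []}

def fetch_page(cursor):
    # Stand-in for getattr(x(api_client), props)(**kwargs) in the test script.
    return PAGES[cursor]

def crawl(max_pages=10, sleep_time=0.0):
    cursor_list = {None}      # frontier: cursors still to request
    cursor_history = set()    # cursors already requested, never repeated
    for _ in range(max_pages):
        cursor = cursor_list.pop()
        cursor_history.add(cursor)
        new_cursors = fetch_page(cursor)
        cursor_list.update(set(new_cursors) - cursor_history)
        if len(cursor_list) == 0:
            break
        time.sleep(sleep_time)
    return cursor_history

print(crawl())  # {None, 'c1', 'c2', 'c3'}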
@@ -109,6 +109,15 @@ class RemoveDiscriminator(SchemasHookBase):
         return value


+class SchemasCheck(SchemasHookBase):
+    def hook(self, value: dict):
+        if value.get("allOf") is not None:
+            print(f"allOf is used")
+        if value.get("type") is None:
+            print("Type is None")
+        return value
+
+
 # RequestHookBase extends
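The new SchemasCheck hook only inspects the schema node it is handed and returns it unchanged, printing a note when the node uses allOf or lacks a type. How and where SchemasHookBase applies such hooks is defined elsewhere in the repository; as a rough standalone illustration, the walker and sample schema below are assumptions, not the project's code.

def check_schema(value: dict):
    # Same checks as SchemasCheck.hook, applied to a single schema node.
    if value.get("allOf") is not None:
        print("allOf is used")
    if value.get("type") is None:
        print("Type is None")
    return value

def walk(node):
    # Hypothetical driver: visit every mapping in a schema tree.
    if isinstance(node, dict):
        check_schema(node)
        for child in node.values():
            walk(child)
    elif isinstance(node, list):
        for child in node:
            walk(child)

# Illustrative schema fragment, not taken from the repository.
walk({
    "type": "object",
    "properties": {
        "user": {"allOf": [{"$ref": "#/components/schemas/User"}]},
    },
})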