
update tools

Signed-off-by: ふぁ <yuki@yuki0311.com>
ふぁ 2024-09-27 11:12:52 +09:00
parent 29a38318e8
commit 4ffbb5e063
No known key found for this signature in database
GPG key ID: 83A8A5E74872A8AA
9 changed files with 158 additions and 22 deletions

15
.vscode/settings.json vendored
View file

@@ -6,14 +6,23 @@
   "yaml.schemas": {
     "https://raw.githubusercontent.com/OAI/OpenAPI-Specification/main/schemas/v3.0/schema.json": "src/**/*.yaml"
   },
+  "[json]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode"
+  },
+  "[jsonc]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode"
+  },
   "[yaml]": {
-    "editor.defaultFormatter": "redhat.vscode-yaml"
+    "editor.defaultFormatter": "esbenp.prettier-vscode"
   },
+  "[markdown]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode"
+  },
   // "python.analysis.typeCheckingMode": "basic",
   "[python]": {
     "editor.codeActionsOnSave": {
       "source.organizeImports": "explicit",
-      "source.fixAll": "explicit",
-    },
+      "source.fixAll": "explicit"
+    }
   }
 }

4
.vscode/tasks.json vendored
View file

@@ -32,14 +32,14 @@
         "command": [
           "python3 -m venv .venv;",
           ".venv/bin/python3 -m pip install -r requirements.txt;",
-          "wget https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/7.4.0/openapi-generator-cli-7.4.0.jar -O openapi-generator-cli.jar;"
+          "wget https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/7.8.0/openapi-generator-cli-7.8.0.jar -O openapi-generator-cli.jar;"
         ]
       },
       "windows": {
         "command": [
           "python -m venv .venv;",
           ".venv/Scripts/python -m pip install -r requirements.txt;",
-          "Invoke-WebRequest https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/7.4.0/openapi-generator-cli-7.4.0.jar -OutFile openapi-generator-cli.jar;"
+          "Invoke-WebRequest https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/7.8.0/openapi-generator-cli-7.8.0.jar -OutFile openapi-generator-cli.jar;"
         ]
       }
     }
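
The only change here is bumping the pinned openapi-generator-cli from 7.4.0 to 7.8.0. For reference, a platform-neutral sketch of the same download step in Python (the URL comes from the task above; the snippet itself is not part of the repository):

import urllib.request

# Pinned generator version, matching the updated tasks.json entries.
VERSION = "7.8.0"
URL = (
    "https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/"
    f"{VERSION}/openapi-generator-cli-{VERSION}.jar"
)

# Fetch the jar into the working directory, like the wget / Invoke-WebRequest commands.
urllib.request.urlretrieve(URL, "openapi-generator-cli.jar")
print("downloaded openapi-generator-cli.jar")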

View file

@@ -7,9 +7,7 @@
             "latestControlAvailable": true,
             "requestContext": "launch",
             "withCommunity": true,
-            "seenTweetIds": [
-                "1349129669258448897"
-            ]
+            "seenTweetIds": ["1349129669258448897"]
         },
         "features": {
             "rweb_tipjar_consumption_enabled": true,
@@ -46,9 +44,7 @@
             "includePromotedContent": true,
             "latestControlAvailable": true,
             "requestContext": "launch",
-            "seenTweetIds": [
-                "1349129669258448897"
-            ]
+            "seenTweetIds": ["1349129669258448897"]
         },
         "features": {
             "rweb_tipjar_consumption_enabled": true,
@@ -156,9 +152,7 @@
     "UsersByRestIds": {
         "queryId": "itEhGywpgX9b3GJCzOtSrA",
         "variables": {
-            "userIds": [
-                "44196397"
-            ]
+            "userIds": ["44196397"]
         },
         "features": {
             "responsive_web_graphql_exclude_directive_enabled": true,

View file

@@ -277,7 +277,21 @@ components:
       properties:
         type:
           type: string
-          enum: [TimelineCoverBehaviorDismiss]
+          enum: [TimelineCoverBehaviorDismiss, TimelineCoverBehaviorNavigate]
+        url:
+          type: TimelineCoverBehaviorUrl
+    TimelineCoverBehaviorUrl:
+      required:
+        - "url"
+        - "url_type"
+      properties:
+        url:
+          type: string
+          format: uri
+        url_type:
+          type: string
+          enum: ["ExternalUrl"]
     Callback:
       required:
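
The cover-behavior enum gains TimelineCoverBehaviorNavigate, whose url field is constrained by the new TimelineCoverBehaviorUrl schema. A hypothetical payload that would satisfy it, written as a Python literal (the concrete values are invented for illustration):

# Hypothetical cover behavior matching the extended schema above.
cover_behavior = {
    "type": "TimelineCoverBehaviorNavigate",
    "url": {
        "url": "https://help.x.com/",  # must be a string in URI format
        "url_type": "ExternalUrl",     # the only value the enum allows
    },
}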

View file

@@ -7,8 +7,6 @@ paths: {}
 components:
   schemas:
     TimelineV2:
-      required:
-        - "timeline"
       properties:
         timeline:
           $ref: "#/components/schemas/Timeline"

View file

@@ -1464,8 +1464,6 @@ components:
           type: integer
     AllowDownloadStatus:
-      required:
-        - "allow_download"
       properties:
         allow_download:
           type: boolean

View file

@@ -96,6 +96,8 @@ components:
           $ref: "#/components/schemas/UserTipJarSettings"
         legacy_extended_profile:
           $ref: "#/components/schemas/UserLegacyExtendedProfile"
+        has_hidden_likes_on_profile:
+          type: boolean
     UserProfessional:
       required:

View file

@@ -1,6 +1,7 @@
 import base64
 import concurrent.futures
 import glob
+import inspect
 import json
 import logging
 import os
@@ -180,6 +181,8 @@ if __name__ == "__main__":
             f'cookie.json not found. Please run `{"; ".join(commands)}` first.'
         )
+    if isinstance(cookies, list):
+        cookies = {k["name"]: k["value"] for k in cookies}
     cookies_str = "; ".join([f"{k}={v}" for k, v in cookies.items()])
     with open("src/config/placeholder.json", "r") as f:
@@ -231,7 +234,7 @@ if __name__ == "__main__":
     error_count = 0
     for x in [pt.DefaultApi, pt.TweetApi, pt.UserApi, pt.UsersApi, pt.UserListApi]:
-        for props, fn in x.__dict__.items():
+        for props, fn in inspect.getmembers(x):
             if not callable(fn):
                 continue
             if props.startswith("__") or not props.endswith("_with_http_info"):
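
Two behavioural tweaks in this script: a browser-exported cookie.json may be a list of {name, value, ...} objects rather than a flat mapping, so it is normalised into a dict first, and the member iteration switches from x.__dict__.items() to inspect.getmembers(x), which also reports members inherited from base classes. A small self-contained illustration of both points (the Base/UserApi classes below are invented for the example):

import inspect

# A list-style cookie export, as produced by some browser extensions.
cookies = [{"name": "ct0", "value": "abc"}, {"name": "auth_token", "value": "xyz"}]
if isinstance(cookies, list):
    cookies = {k["name"]: k["value"] for k in cookies}
print("; ".join(f"{k}={v}" for k, v in cookies.items()))

class Base:
    def get_user_with_http_info(self):  # defined on the parent class
        pass

class UserApi(Base):
    pass

# __dict__ only lists attributes defined directly on the class ...
print("get_user_with_http_info" in UserApi.__dict__)  # False
# ... while inspect.getmembers() also walks base classes, so inherited methods show up.
print(any(name == "get_user_with_http_info" for name, _ in inspect.getmembers(UserApi)))  # True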

118
tools/generater.py Normal file
View file

@@ -0,0 +1,118 @@
# https://github.com/tsukumijima/KonomiTV/blob/master/server/misc/TwitterAPIQueryGenerator.py
# https://github.com/tsukumijima/KonomiTV/blob/master/License.txt
#!/usr/bin/env python3
# Usage: poetry run python -m misc.TwitterAPIQueryGenerator
import json
import re
import urllib.parse

from rich import print
from rich.rule import Rule


def main():
    print(Rule(characters="="))
    print(
        'In the Chrome DevTools Network tab, use "Copy all listed as fetch" and paste the copied code into `input.js`.'
    )
    print("Press Enter to continue.")
    print(Rule(characters="="))
    input()

    with open("./tools/input.js", "r") as f:
        fetch_code_raw = f.read()
    print(Rule(characters="="))

    splited = fetch_code_raw.split("\n")
    fetch_code_list = []
    code = ""
    for line in splited:
        if line.startswith("fetch("):
            if code:
                fetch_code_list.append(code)
            code = line
        else:
            code += line + "\n"
    fetch_code_list.append(code)

    for fetch_code in fetch_code_list:
        # Extract the query_id and endpoint
        query_id_match = re.search(r'/i/api/graphql/([^/]+)/([^"?]+)', fetch_code)
        if not query_id_match:
            print("Failed to extract query_id and endpoint.")
            print(Rule(characters="="))
            return
        query_id = query_id_match.group(1)
        endpoint = query_id_match.group(2)

        # Determine the request method
        method_match = re.search(r'"method"\s*:\s*"(GET|POST)"', fetch_code)
        if not method_match:
            print("Failed to determine the request method.")
            print(Rule(characters="="))
            return
        method = method_match.group(1)

        if method == "POST":
            # For a POST request, pull the {}-wrapped object out of the second
            # argument of the fetch() call with a regex and parse it as JSON
            body_match = re.search(r'"body"\s*:\s*"({.*})"', fetch_code, re.DOTALL)
            if not body_match:
                print("Failed to extract the body.")
                print(Rule(characters="="))
                return
            body_json_str = body_match.group(1).replace("\\", "")
            body_json = json.loads(body_json_str)
            features = body_json.get("features", None)
        else:
            # For a GET request, first extract the URL
            url_match = re.search(r'"(https?://[^"]+)"', fetch_code)
            if not url_match:
                print("Failed to extract the URL.")
                print(Rule(characters="="))
                return
            url = url_match.group(1)

            # Parse the URL and take its query string
            parsed_url = urllib.parse.urlparse(url)
            query_string = parsed_url.query

            # Parse the query string into a dict
            query_dict = urllib.parse.parse_qs(query_string)

            # Take the features parameter
            features_json_str = query_dict.get("features", [None])[0]
            if features_json_str is None:
                features = None
            else:
                try:
                    features = json.loads(features_json_str)
                except json.JSONDecodeError:
                    print(
                        "Failed to parse features as JSON. Continuing with features set to None."
                    )
                    features = None

        # Write the extracted queryId and features into the placeholder config
        with open("./src/config/placeholder.json", "r") as f:
            placeholder = json.load(f)
        with open("./src/config/placeholder.json", "w") as f:
            placeholder[endpoint] = {
                **placeholder.get(endpoint, {}),
                "queryId": query_id,
                "features": features,
            }
            json.dump(placeholder, f, indent=4)


if __name__ == "__main__":
    main()
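
A quick way to sanity-check the extraction logic is to run the same regexes against a single copied fetch() call. The snippet below uses a made-up request in the shape generater.py expects, not real captured traffic:

import json
import re
import urllib.parse

# A fabricated "Copy as fetch" line; the queryId/endpoint mirror the placeholder example above.
fetch_code = (
    'fetch("https://x.com/i/api/graphql/itEhGywpgX9b3GJCzOtSrA/UsersByRestIds'
    '?variables=%7B%22userIds%22%3A%5B%2244196397%22%5D%7D'
    '&features=%7B%22responsive_web_graphql_exclude_directive_enabled%22%3Atrue%7D", '
    '{"method": "GET"});'
)

# Same regex generater.py uses to pull out the queryId and endpoint.
m = re.search(r'/i/api/graphql/([^/]+)/([^"?]+)', fetch_code)
print(m.group(1), m.group(2))  # itEhGywpgX9b3GJCzOtSrA UsersByRestIds

# GET branch: decode the features parameter from the URL's query string.
url = re.search(r'"(https?://[^"]+)"', fetch_code).group(1)
query = urllib.parse.parse_qs(urllib.parse.urlparse(url).query)
print(json.loads(query["features"][0]))  # {'responsive_web_graphql_exclude_directive_enabled': True}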