👾Add GitHub issue search action for my code

This commit is contained in:
Fundor333
2025-10-22 19:35:32 +02:00
parent abc191941a
commit ba1a8f75c7
2 changed files with 146 additions and 0 deletions

View File

@@ -0,0 +1,44 @@
name: Cron Github issue
on:
  schedule:
    - cron: "0 0 * * *"
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
  workflow_dispatch:
jobs:
  # Job id reflects what it does (it fetches GitHub issues, not webmentions).
  github-issues:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        # Pin to a released major version; `@master` is a mutable ref and can
        # change (or break) underneath the workflow at any time.
        uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.12"
          cache: "pip" # caching pip dependencies
      - name: Install Pip dependencies
        run: pip install -r requirements.txt
      - name: Fetch Github Issue
        run: python ./action_script/github_issue.py
      - name: Commit to repository
        env:
          GITHUB_TOKEN: ${{ secrets.TOKEN }}
          # Commit message matches what this workflow actually fetched.
          COMMIT_MSG: |
            👾Fetch GitHub issues
            skip-checks: true
        run: |
          git config user.email "git@fundor333.com"
          git config user.name "fundor333"
          git remote set-url origin https://x-access-token:${GITHUB_TOKEN}@github.com/fundor333/fundor333.github.io.git
          git checkout main
          git add .
          git diff --quiet && git diff --staged --quiet || (git commit -m "${COMMIT_MSG}"; git push origin main)

View File

@@ -0,0 +1,102 @@
import requests
import json
import os
import logging
from typing import Any
# Configure root logging once at import time: INFO level, timestamped
# records, emitted to the console via a stream handler.
_LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"
logging.basicConfig(
    level=logging.INFO,
    format=_LOG_FORMAT,
    handlers=[logging.StreamHandler()],
)

# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
def search_issues_github(website: str) -> dict[str, Any]:
    """Search GitHub issues that mention *website* and archive them locally.

    Pages through the GitHub issue-search API (up to ``max_pages`` pages),
    writes one JSON file per issue to ``data/github/<id>.json``, and returns
    a summary dict.

    Args:
        website: Phrase to search for in issue titles and bodies.

    Returns:
        On success: ``{"status": "success", "result_summary", "issues_found",
        "website"}``. On a request failure: ``{"status": "error", "details",
        "issues_found"}`` with whatever was collected so far.
    """
    # Quote the website so the API matches the exact phrase; search
    # qualifiers are space-separated (no commas), and -author excludes the
    # site owner's own issues.
    query = f'"{website}" in:body,title type:issue -author:fundor333'
    logger.info("Query: %s", query)
    api_url = "https://api.github.com/search/issues"
    # NOTE: the search endpoint's sort parameter is "sort" — the previous
    # "s" key was silently ignored by the API.
    params: dict[str, Any] = {
        "q": query,
        "sort": "created",
        "order": "desc",
    }
    found_issues: list[dict[str, Any]] = []
    page = 1
    max_pages = 10
    out_dir = os.path.join("data", "github")
    os.makedirs(out_dir, exist_ok=True)
    while page <= max_pages:
        params["page"] = page
        logger.debug("Fetching page %d...", page)
        # Keep the try narrow: only the HTTP round-trip can raise a
        # RequestException; file errors are handled separately below.
        try:
            response = requests.get(api_url, params=params, timeout=15)
            # Raises an exception for HTTP errors (4xx or 5xx)
            response.raise_for_status()
            data = response.json()
        except requests.exceptions.RequestException as e:
            error_message = f"🛑 API Request Error: {e}"
            logger.error(error_message)
            return {
                "status": "error",
                "details": error_message,
                "issues_found": len(found_issues),
            }
        items = data.get("items", [])
        logger.debug("Page %d returned %d items.", page, len(items))
        if not items:
            logger.info(
                f"No more results found after page {page - 1}. Stopping search."
            )
            break
        for issue in items:
            # Keep only the fields the site needs; strip the API prefix so
            # "repository" reads as "owner/name".
            reduced_issue = {
                "id": issue.get("id"),
                "number": issue.get("number"),
                "title": issue.get("title"),
                "url": issue.get("html_url"),
                "repository": issue.get("repository_url", "").replace(
                    "https://api.github.com/repos/", ""
                ),
                "created_at": issue.get("created_at"),
                "state": issue.get("state"),
                "author": issue.get("user", {}).get("login"),
                "body": issue.get("body"),
            }
            found_issues.append(reduced_issue)
            file_path = os.path.join(out_dir, f"{issue.get('id')}.json")
            # A single unwritable file should not abort the whole run.
            try:
                with open(file_path, "w", encoding="utf-8") as fp:
                    json.dump(reduced_issue, fp, ensure_ascii=False, indent=4)
                logger.debug(f"Saved issue {issue.get('id')} to {file_path}")
            except OSError as file_err:
                logger.error(
                    f"❌ Error writing file for issue {issue.get('id')}: {file_err}"
                )
        page += 1
    result_summary = f"✅ Found {len(found_issues)} GitHub issues."
    logger.info(result_summary)
    return {
        "status": "success",
        "result_summary": result_summary,
        "issues_found": len(found_issues),
        "website": website,
    }
# Phrase searched for across GitHub issue titles and bodies.
WEBSITE_TO_SEARCH = "fundor333.com"

# Guard the entry point so importing this module does not trigger a
# network call as a side effect; the workflow runs it as a script.
if __name__ == "__main__":
    search_result = search_issues_github(website=WEBSITE_TO_SEARCH)
    logger.info(search_result)