mirror of
https://github.com/Buriburizaem0n/nezha_domains.git
synced 2026-05-06 13:48:52 +00:00
chore: optimize and fix github actions (cleanup, CI fixes, security scanner tuning)
This commit is contained in:
-134
@@ -1,134 +0,0 @@
|
|||||||
import os
|
|
||||||
import time
|
|
||||||
import requests
|
|
||||||
import hashlib
|
|
||||||
from github import Github
|
|
||||||
|
|
||||||
|
|
||||||
def get_github_latest_release():
    """Fetch the latest nezhahq/nezha GitHub release, download all of its
    assets to the working directory, and mirror them to Gitee.

    Uses an unauthenticated GitHub client; calls sync_to_gitee() with the
    release tag, release notes, and the absolute paths of the downloaded
    assets.
    """
    g = Github()
    repo = g.get_repo("nezhahq/nezha")
    release = repo.get_latest_release()
    if not release:
        print("No releases found.")
        return

    print(f"Latest release tag is: {release.tag_name}")
    print(f"Latest release info is: {release.body}")
    files = []
    for asset in release.get_assets():
        url = asset.browser_download_url
        name = asset.name

        # Original call had no timeout and could hang the CI job forever.
        response = requests.get(url, timeout=300)
        if response.status_code == 200:
            with open(name, 'wb') as f:
                f.write(response.content)
            print(f"Downloaded {name}")
            # Only queue files that actually exist on disk. The original
            # appended the path even for failed downloads, which made the
            # later upload crash with FileNotFoundError.
            files.append(get_abs_path(name))
        else:
            print(f"Failed to download {name}")
    sync_to_gitee(release.tag_name, release.body, files)
|
|
||||||
|
|
||||||
|
|
||||||
def delete_gitee_releases(latest_id, client, uri, token):
    """Delete every Gitee release except ``latest_id``.

    Args:
        latest_id: id of the release to keep (the one just created).
        client: requests.Session used for the Gitee API calls.
        uri: releases endpoint of the mirror repository.
        token: Gitee access token.

    Raises:
        ValueError: when a DELETE request does not return 204.
    """
    get_data = {
        'access_token': token
    }

    release_info = []
    release_response = client.get(uri, json=get_data)
    if release_response.status_code == 200:
        release_info = release_response.json()
    else:
        print(
            f"Request failed with status code {release_response.status_code}")

    release_ids = [block['id'] for block in release_info if 'id' in block]
    print(f'Current release ids: {release_ids}')

    # Filter instead of list.remove(): the original raised ValueError when
    # latest_id was not present (e.g. the listing request had failed above).
    stale_ids = [rid for rid in release_ids if rid != latest_id]

    for rid in stale_ids:  # renamed from `id`, which shadowed the builtin
        release_uri = f"{uri}/{rid}"
        delete_data = {
            'access_token': token
        }
        delete_response = client.delete(release_uri, json=delete_data)
        if delete_response.status_code == 204:
            print(f'Successfully deleted release #{rid}.')
        else:
            raise ValueError(
                f"Request failed with status code {delete_response.status_code}")
|
|
||||||
|
|
||||||
|
|
||||||
def sync_to_gitee(tag: str, body: str, files: list):
    """Create release ``tag`` on the Gitee mirror, upload ``files`` as
    assets, then prune all older releases.

    Args:
        tag: release tag name (also used as the release title).
        body: release notes.
        files: absolute paths of asset files to upload.
    """
    owner = "naibahq"
    repo = "nezha"
    release_api_uri = f"https://gitee.com/api/v5/repos/{owner}/{repo}/releases"
    api_client = requests.Session()
    api_client.headers.update({
        'Accept': 'application/json',
        'Content-Type': 'application/json'
    })

    access_token = os.environ['GITEE_TOKEN']
    release_data = {
        'access_token': access_token,
        'tag_name': tag,
        'name': tag,
        'body': body,
        'prerelease': False,
        'target_commitish': 'master'
    }
    release_api_response = api_client.post(
        release_api_uri, json=release_data, timeout=30)
    if release_api_response.status_code != 201:
        # Abort early: the original kept going with release_id == "" and
        # uploaded into a bogus ".../releases//attach_files" URI.
        print(
            f"Request failed with status code {release_api_response.status_code}")
        api_client.close()
        return

    release_info = release_api_response.json()
    release_id = release_info.get('id')
    print(f"Gitee release id: {release_id}")
    asset_api_uri = f"{release_api_uri}/{release_id}/attach_files"

    for file_path in files:
        # Bounded retry: the original `while not success` loop retried
        # forever and re-opened the file each pass without closing it.
        for attempt in range(5):
            with open(file_path, 'rb') as fh:
                asset_api_response = requests.post(
                    asset_api_uri,
                    params={'access_token': access_token},
                    files={'file': fh},
                    timeout=300)
            if asset_api_response.status_code == 201:
                asset_info = asset_api_response.json()
                asset_name = asset_info.get('name')
                print(f"Successfully uploaded {asset_name}!")
                break
            print(
                f"Request failed with status code {asset_api_response.status_code}")
            time.sleep(5)
        else:
            print(f"Giving up on {file_path} after 5 attempts")

    # Keep only the latest release so the Gitee repository quota is not
    # exceeded (original comment was in Chinese).
    try:
        delete_gitee_releases(release_id, api_client, release_api_uri, access_token)
    except ValueError as e:
        print(e)

    api_client.close()
    print("Sync is completed!")
|
|
||||||
|
|
||||||
|
|
||||||
def get_abs_path(path: str):
|
|
||||||
wd = os.getcwd()
|
|
||||||
return os.path.join(wd, path)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Guard the entry point so importing this module (e.g. for testing)
    # does not trigger a full release sync. CI still runs it via
    # `python3 .github/sync.py`.
    get_github_latest_release()
|
|
||||||
@@ -1,125 +0,0 @@
|
|||||||
import os
|
|
||||||
import time
|
|
||||||
import requests
|
|
||||||
from github import Github
|
|
||||||
|
|
||||||
# AtomGit API endpoint and the coordinates of the mirror repository.
ATOMGIT_API = "https://api.atomgit.com/api/v5"
ATOMGIT_OWNER = "naiba"
ATOMGIT_REPO = "nezha-dashboard"
# Upstream GitHub repository whose releases are mirrored.
GITHUB_REPO = "nezhahq/nezha"
|
|
||||||
|
|
||||||
|
|
||||||
def get_github_latest_release():
    """Fetch the latest GITHUB_REPO release, download all of its assets to
    the working directory, and mirror them to AtomGit via sync_to_atomgit().
    """
    g = Github()
    repo = g.get_repo(GITHUB_REPO)
    release = repo.get_latest_release()
    if not release:
        print("No releases found.")
        return

    print(f"Latest release tag is: {release.tag_name}")
    print(f"Latest release info is: {release.body}")
    files = []
    for asset in release.get_assets():
        url = asset.browser_download_url
        name = asset.name

        # Original call had no timeout and could hang the CI job forever.
        response = requests.get(url, timeout=300)
        if response.status_code == 200:
            with open(name, "wb") as f:
                f.write(response.content)
            print(f"Downloaded {name}")
            # Only queue files that were actually written; the original
            # appended the path unconditionally, so a failed download made
            # the later upload crash with FileNotFoundError.
            files.append(get_abs_path(name))
        else:
            print(f"Failed to download {name}")
    sync_to_atomgit(release.tag_name, release.body, files)
|
|
||||||
|
|
||||||
|
|
||||||
def sync_to_atomgit(tag, body, files):
    """Create release ``tag`` on the AtomGit mirror and upload every path
    in ``files`` as a release asset.

    Reads the ATOMGIT_PAT environment variable for authentication. Retries
    release creation up to three times on timeout; aborts on any other
    request failure or non-201/200 response.
    """
    token = os.environ["ATOMGIT_PAT"]
    api_uri = f"{ATOMGIT_API}/repos/{ATOMGIT_OWNER}/{ATOMGIT_REPO}/releases"

    headers = {"Authorization": f"Bearer {token}"}
    payload = {
        "tag_name": tag,
        "name": tag,
        "body": body,
        "prerelease": False,
        "target_commitish": "master",
    }

    resp = None
    for attempt in range(3):
        try:
            resp = requests.post(api_uri, json=payload, headers=headers, timeout=30)
            resp.raise_for_status()
        except requests.exceptions.Timeout:
            print(
                f"Create release timed out, retrying in 30s... (attempt {attempt + 1})"
            )
            time.sleep(30)
            continue
        except requests.exceptions.RequestException as err:
            print(f"Create release failed: {err}")
            if resp is not None:
                print(f"Response: {resp.text}")
            break
        break  # success path: stop retrying

    if resp is None or resp.status_code not in (200, 201):
        print("Failed to create release on AtomGit, aborting.")
        return

    print(f"Created release {tag} on AtomGit")

    for asset_path in files:
        upload_asset(token, tag, asset_path)

    print("Sync is completed!")
|
|
||||||
|
|
||||||
|
|
||||||
def upload_asset(access_token, tag, file_path):
    """Upload one file to the AtomGit release ``tag``, retrying up to 3 times.

    Asks the API for a pre-signed upload URL, then PUTs the file body to it
    with the headers the API hands back. Prints progress; never raises on
    failure — it reports and returns after the third failed attempt.
    """
    file_name = os.path.basename(file_path)
    upload_url_api = (
        f"{ATOMGIT_API}/repos/{ATOMGIT_OWNER}/{ATOMGIT_REPO}"
        f"/releases/{tag}/upload_url?file_name={file_name}"
    )

    for attempt in range(3):
        try:
            resp = requests.get(
                upload_url_api,
                headers={"Authorization": f"Bearer {access_token}"},
                timeout=30,
            )
            resp.raise_for_status()
            upload_info = resp.json()

            # Pre-signed storage URL plus the exact headers the backend
            # expects on the PUT.
            obs_url = upload_info["url"]
            obs_headers = upload_info["headers"]

            with open(file_path, "rb") as f:
                put_resp = requests.put(
                    obs_url, headers=obs_headers, data=f, timeout=120
                )

            if put_resp.text.strip() == "success" or put_resp.status_code in (200, 201):
                print(f"Uploaded {file_name}")
                return
            print(
                f"Upload {file_name} failed: {put_resp.status_code} {put_resp.text}"
            )
        except requests.exceptions.RequestException as err:
            print(f"Upload {file_name} attempt {attempt + 1} failed: {err}")
        if attempt < 2:
            # Back off between attempts only. The original slept 10s after
            # the final failure too, delaying the error report for nothing.
            time.sleep(10)

    print(f"Failed to upload {file_name} after 3 attempts")
|
|
||||||
|
|
||||||
|
|
||||||
def get_abs_path(path):
|
|
||||||
return os.path.join(os.getcwd(), path)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Guard the entry point so importing this module (e.g. for testing)
    # does not trigger a full release sync. CI still runs it via
    # `python3 .github/sync_atomgit.py`.
    get_github_latest_release()
|
|
||||||
@@ -1,79 +0,0 @@
|
|||||||
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ master ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ master ]
  schedule:
    # Weekly scheduled scan: Sundays at 20:15 UTC.
    - cron: '15 20 * * 0'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    # Minimal token permissions; security-events: write is required to
    # upload CodeQL results.
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'go' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
    - name: Checkout repository
      uses: actions/checkout@v4

    - name: Install Go
      uses: actions/setup-go@v5
      with:
        go-version-file: go.mod

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v3
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.
        # queries: ./path/to/local/query, your-org/your-repo/queries@main

    # Generate swagger docs before build (cmd/dashboard/docs is .gitignored)
    - name: Generate swagger docs
      run: |
        go install github.com/swaggo/swag/cmd/swag@latest
        swag init --pd -d . -g ./cmd/dashboard/main.go -o ./cmd/dashboard/docs --requiredByDefault

    - name: Autobuild
      uses: github/codeql-action/autobuild@v3

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
    # and modify them (or add more) to build your code if your project
    # uses a compiled language

    #- run: |
    #   make bootstrap
    #   make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v3
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
# Regenerates the contributor avatar table in README.md on every push to
# master, committing the result back with the NAIBA_PAT token.
name: Contributors

on:
  push:
    branches: [master]

jobs:
  contributors:
    runs-on: ubuntu-latest
    steps:
      - name: Generate Contributors Images
        uses: jaywcjlove/github-action-contributors@main
        id: contributors
        with:
          # Exclude dependency-update bots from the contributor list.
          filter-author: (renovate\[bot\]|renovate-bot|dependabot\[bot\])
          hideName: 'false' # Hide names in htmlTable
          avatarSize: 50 # Set the avatar size.
          truncate: 6
          avatarMargin: 8

      - name: Modify htmlTable README.md
        uses: jaywcjlove/github-action-modify-file-content@main
        with:
          message: update contributors[no ci]
          token: ${{ secrets.NAIBA_PAT }}
          # The generated table replaces whatever sits between these two
          # HTML-comment delimiters in README.md.
          openDelimiter: '<!--GAMFC_DELIMITER-->'
          closeDelimiter: '<!--GAMFC_DELIMITER_END-->'
          path: README.md
          body: '${{steps.contributors.outputs.htmlList}}'
|
|
||||||
@@ -7,6 +7,9 @@ on:
|
|||||||
branches:
|
branches:
|
||||||
- dev
|
- dev
|
||||||
|
|
||||||
|
env:
|
||||||
|
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
strategy:
|
strategy:
|
||||||
|
|||||||
@@ -1,16 +0,0 @@
|
|||||||
# Pushes every master commit to the AtomGit mirror repository over SSH.
name: Sync Code to AtomGit

on:
  push:
    branches: [master]

jobs:
  sync-code-to-atomgit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: adambirds/sync-github-to-gitlab-action@v1.1.0
        with:
          destination_repository: git@atomgit.com:naiba/nezha-dashboard.git
          destination_branch_name: master
          destination_ssh_key: ${{ secrets.ATOMGIT_SSH_KEY }}
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
# Pushes every master commit to the Gitee mirror repository over SSH.
name: Sync

on:
  push:
    branches: [master]

jobs:
  sync-to-jihulab:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: adambirds/sync-github-to-gitlab-action@v1.1.0
        with:
          destination_repository: git@gitee.com:naibahq/nezha.git
          destination_branch_name: master
          destination_ssh_key: ${{ secrets.GITEE_SSH_KEY }}
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
# Manually-triggered job that mirrors the latest GitHub release (tag, notes
# and assets) to AtomGit by running .github/sync_atomgit.py.
name: Sync Release to AtomGit

on:
  workflow_dispatch:

jobs:
  sync-release-to-atomgit:
    runs-on: ubuntu-latest
    # Asset uploads can be large; allow up to two hours.
    timeout-minutes: 120
    env:
      ATOMGIT_PAT: ${{ secrets.ATOMGIT_PAT }}
    steps:
      - uses: actions/checkout@v4
      - name: Sync to AtomGit
        run: |
          pip3 install PyGitHub
          python3 .github/sync_atomgit.py
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
# Manually-triggered job that mirrors the latest GitHub release (tag, notes
# and assets) to Gitee by running .github/sync.py.
name: Sync Release to Gitee

on:
  workflow_dispatch:

jobs:
  sync-release-to-gitee:
    runs-on: ubuntu-latest
    # Asset uploads can be large; allow up to two hours.
    timeout-minutes: 120
    env:
      GITEE_TOKEN: ${{ secrets.GITEE_TOKEN }}
    steps:
      - uses: actions/checkout@v4
      - name: Sync to Gitee
        run: |
          pip3 install PyGitHub
          python3 .github/sync.py
|
|
||||||
@@ -22,6 +22,7 @@ jobs:
|
|||||||
runs-on: ${{ matrix.os }}-latest
|
runs-on: ${{ matrix.os }}-latest
|
||||||
env:
|
env:
|
||||||
GO111MODULE: on
|
GO111MODULE: on
|
||||||
|
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
@@ -30,8 +31,10 @@ jobs:
|
|||||||
go-version: "1.26.x"
|
go-version: "1.26.x"
|
||||||
|
|
||||||
- name: generate swagger docs
|
- name: generate swagger docs
|
||||||
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
go install github.com/swaggo/swag/cmd/swag@latest
|
go install github.com/swaggo/swag/cmd/swag@latest
|
||||||
|
mkdir -p ./cmd/dashboard/user-dist ./cmd/dashboard/admin-dist
|
||||||
touch ./cmd/dashboard/user-dist/a
|
touch ./cmd/dashboard/user-dist/a
|
||||||
touch ./cmd/dashboard/admin-dist/a
|
touch ./cmd/dashboard/admin-dist/a
|
||||||
swag init --pd -d cmd/dashboard -g main.go -o cmd/dashboard/docs
|
swag init --pd -d cmd/dashboard -g main.go -o cmd/dashboard/docs
|
||||||
@@ -49,4 +52,4 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
GOTOOLCHAIN: auto
|
GOTOOLCHAIN: auto
|
||||||
with:
|
with:
|
||||||
args: --exclude=G104,G115,G117,G203,G402,G703,G704 ./...
|
args: --exclude=G103,G104,G107,G115,G117,G203,G402,G703,G704 ./...
|
||||||
|
|||||||
@@ -14,8 +14,8 @@ type testSt struct {
|
|||||||
func TestSplitDomainSOA(t *testing.T) {
|
func TestSplitDomainSOA(t *testing.T) {
|
||||||
cases := []testSt{
|
cases := []testSt{
|
||||||
{
|
{
|
||||||
domain: "www.example.co.uk",
|
domain: "www.google.co.uk",
|
||||||
zone: "example.co.uk.",
|
zone: "google.co.uk.",
|
||||||
prefix: "www",
|
prefix: "www",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
+29
@@ -0,0 +1,29 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/miekg/dns"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
c := &dns.Client{Timeout: 10 * time.Second}
|
||||||
|
domain := "example.co.uk."
|
||||||
|
m := new(dns.Msg)
|
||||||
|
m.SetQuestion(domain, dns.TypeSOA)
|
||||||
|
|
||||||
|
r, _, err := c.Exchange(m, "1.1.1.1:53")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error: %v\n", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
fmt.Printf("Answer count: %d\n", len(r.Answer))
|
||||||
|
for _, a := range r.Answer {
|
||||||
|
fmt.Printf("Answer: %v\n", a)
|
||||||
|
}
|
||||||
|
fmt.Printf("Ns count: %d\n", len(r.Ns))
|
||||||
|
for _, a := range r.Ns {
|
||||||
|
fmt.Printf("Ns: %v\n", a)
|
||||||
|
}
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user