Mirror of https://github.com/XTLS/Xray-core.git, synced 2024-11-25 18:11:27 +00:00
transfer geodat with actions/cache
commit cc4be239cf
parent 2d898480be
.github/workflows/release.yml (vendored): 69 lines changed
@@ -20,7 +20,51 @@ on:
       - "go.sum"
       - ".github/workflows/*.yml"
 jobs:
+  prepare:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Restore Cache
+        uses: actions/cache/restore@v3
+        with:
+          path: resources
+          key: xray-geodat-
+
+      - name: Update Geodat
+        id: update
+        uses: nick-fields/retry@v2
+        with:
+          timeout_minutes: 60
+          retry_wait_seconds: 60
+          max_attempts: 60
+          command: |
+            [ -d 'resources' ] || mkdir resources
+            LIST=('geoip geoip geoip' 'domain-list-community dlc geosite')
+            for i in "${LIST[@]}"
+            do
+              INFO=($(echo $i | awk 'BEGIN{FS=" ";OFS=" "} {print $1,$2,$3}'))
+              FILE_NAME="${INFO[2]}.dat"
+              echo -e "Verifying HASH key..."
+              HASH="$(curl -sL "https://raw.githubusercontent.com/v2fly/${INFO[0]}/release/${INFO[1]}.dat.sha256sum" | awk -F ' ' '{print $1}')"
+              if [ -s "./resources/${FILE_NAME}" ] && [ "$(sha256sum "./resources/${FILE_NAME}" | awk -F ' ' '{print $1}')" == "${HASH}" ]; then
+                continue
+              else
+                echo -e "Downloading https://raw.githubusercontent.com/v2fly/${INFO[0]}/release/${INFO[1]}.dat..."
+                curl -L "https://raw.githubusercontent.com/v2fly/${INFO[0]}/release/${INFO[1]}.dat" -o ./resources/${FILE_NAME}
+                echo -e "Verifying HASH key..."
+                [ "$(sha256sum "./resources/${FILE_NAME}" | awk -F ' ' '{print $1}')" == "${HASH}" ] || { echo -e "The HASH key of ${FILE_NAME} does not match cloud one."; exit 1; }
+                echo "unhit=true" >> $GITHUB_OUTPUT
+              fi
+            done
+
+      - name: Save Cache
+        uses: actions/cache/save@v3
+        if: ${{ steps.update.outputs.unhit }}
+        with:
+          path: resources
+          key: xray-geodat-${{ github.sha }}-${{ github.run_number }}
+
   build:
+    needs: prepare
     permissions:
       contents: write
     strategy:
@@ -160,26 +204,17 @@ jobs:
           cd ./build_assets || exit 1
           mv xray xray.exe
 
-      - name: Prepare to release
-        uses: nick-fields/retry@v2
+      - name: Restore Cache
+        uses: actions/cache/restore@v3
         with:
-          timeout_minutes: 60
-          retry_wait_seconds: 60
-          max_attempts: 60
-          command: |
+          path: resources
+          key: xray-geodat-
+
+      - name: Copy README.md & LICENSE
+        run: |
+          mv -f resources/* build_assets
           cp ${GITHUB_WORKSPACE}/README.md ./build_assets/README.md
           cp ${GITHUB_WORKSPACE}/LICENSE ./build_assets/LICENSE
-            LIST=('geoip geoip geoip' 'domain-list-community dlc geosite')
-            for i in "${LIST[@]}"
-            do
-              INFO=($(echo $i | awk 'BEGIN{FS=" ";OFS=" "} {print $1,$2,$3}'))
-              FILE_NAME="${INFO[2]}.dat"
-              echo -e "Downloading https://raw.githubusercontent.com/v2fly/${INFO[0]}/release/${INFO[1]}.dat..."
-              curl -L "https://raw.githubusercontent.com/v2fly/${INFO[0]}/release/${INFO[1]}.dat" -o ./build_assets/${FILE_NAME}
-              echo -e "Verifying HASH key..."
-              HASH="$(curl -sL "https://raw.githubusercontent.com/v2fly/${INFO[0]}/release/${INFO[1]}.dat.sha256sum" | awk -F ' ' '{print $1}')"
-              [ "$(sha256sum "./build_assets/${FILE_NAME}" | awk -F ' ' '{print $1}')" == "${HASH}" ] || { echo -e "The HASH key of ${FILE_NAME} does not match cloud one."; exit 1; }
-            done
 
       - name: Create ZIP archive
         shell: bash
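
For reference, the download-and-verify loop that the new prepare job runs (the Update Geodat step above) can be exercised locally as a plain shell script. The following is a minimal sketch of the same logic, not part of the commit: it reuses the v2fly URLs and the resources/ directory from the workflow, and a local UNHIT variable stands in for the workflow's "unhit" step output.

#!/usr/bin/env bash
# Local sketch of the "Update Geodat" loop: fetch geoip.dat / geosite.dat
# from the v2fly release branches and verify them against the published
# .sha256sum files, skipping files that already match.
set -euo pipefail

mkdir -p resources
UNHIT=false   # stands in for the workflow's "unhit" step output

LIST=('geoip geoip geoip' 'domain-list-community dlc geosite')
for i in "${LIST[@]}"; do
  INFO=($i)                  # <repo> <remote file stem> <local file stem>
  FILE_NAME="${INFO[2]}.dat"
  URL="https://raw.githubusercontent.com/v2fly/${INFO[0]}/release/${INFO[1]}.dat"

  HASH="$(curl -sL "${URL}.sha256sum" | awk '{print $1}')"
  if [ -s "resources/${FILE_NAME}" ] && \
     [ "$(sha256sum "resources/${FILE_NAME}" | awk '{print $1}')" = "${HASH}" ]; then
    continue                 # cached copy already matches upstream
  fi

  curl -L "${URL}" -o "resources/${FILE_NAME}"
  [ "$(sha256sum "resources/${FILE_NAME}" | awk '{print $1}')" = "${HASH}" ] \
    || { echo "hash mismatch for ${FILE_NAME}" >&2; exit 1; }
  UNHIT=true                 # at least one file was refreshed
done

echo "refreshed: ${UNHIT}"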
.github/workflows/test.yml (vendored): 43 lines changed
@@ -34,44 +34,11 @@ jobs:
           check-latest: true
       - name: Checkout codebase
         uses: actions/checkout@v3
-
-      - name: Prepare geoip
-        if: ${{ matrix.os != 'windows-latest' }}
-        uses: nick-fields/retry@v2
+      - name: Restore Cache
+        uses: actions/cache/restore@v3
         with:
-          timeout_minutes: 60
-          retry_wait_seconds: 30
-          max_attempts: 60
-          command: |
-            mkdir resources
-            wget -O ./resources/geoip.dat https://github.com/v2fly/geoip/releases/latest/download/geoip.dat
-      - name: Prepare geosite
-        if: ${{ matrix.os != 'windows-latest' }}
-        uses: nick-fields/retry@v2
-        with:
-          timeout_minutes: 60
-          retry_wait_seconds: 30
-          max_attempts: 60
-          command: |
-            wget -O ./resources/geosite.dat https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat
-      - name: Prepare geoip for Windows
-        if: ${{ matrix.os == 'windows-latest' }}
-        uses: nick-fields/retry@v2
-        with:
-          timeout_minutes: 60
-          retry_wait_seconds: 30
-          max_attempts: 60
-          command: |
-            mkdir resources
-            Invoke-WebRequest -Uri "https://github.com/v2fly/geoip/releases/latest/download/geoip.dat" -OutFile "./resources/geoip.dat"
-      - name: Prepare geosite for Windows
-        if: ${{ matrix.os == 'windows-latest' }}
-        uses: nick-fields/retry@v2
-        with:
-          timeout_minutes: 60
-          retry_wait_seconds: 30
-          max_attempts: 60
-          command: |
-            Invoke-WebRequest -Uri "https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat" -OutFile "./resources/geosite.dat"
+          path: resources
+          key: xray-geodat-
+          enableCrossOsArchive: true
       - name: Test
         run: go test -timeout 1h -v ./...
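
Note that the test job now relies on the xray-geodat cache being populated by the prepare job in release.yml. On a fork or fresh repository where that cache has never been saved, the restore step comes up empty. A hypothetical fallback, sketched below and not part of this commit, could fetch the files directly before running the tests, reusing the release-asset URLs from the removed steps.

# Hypothetical guard: make sure the geodat files exist before running tests,
# downloading them directly when the cache restore found nothing.
set -euo pipefail

mkdir -p resources
[ -s resources/geoip.dat ] || \
  wget -O resources/geoip.dat \
    https://github.com/v2fly/geoip/releases/latest/download/geoip.dat
[ -s resources/geosite.dat ] || \
  wget -O resources/geosite.dat \
    https://github.com/v2fly/domain-list-community/releases/latest/download/dlc.dat

go test -timeout 1h -v ./...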