generated from headquarter8302/mee6-leaderboard-scrape
-
Notifications
You must be signed in to change notification settings - Fork 0
57 lines (45 loc) · 1.6 KB
/
dump.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
---
# Workflow: periodically scrape a MEE6 levels leaderboard page-by-page with
# cURL, concatenate the JSON responses into one dump file, and commit the
# result back to the repository.
name: Fetch and dump levels

on:
  # Specify a cadence, don't set it too often
  # Uncomment `schedule:` and `- cron:` to activate scheduled running
  schedule:
    # Quoted so generic YAML tooling never misreads the asterisks
    - cron: "0 0 * * *"
  # Manually trigger a workflow
  workflow_dispatch:

# Parameters:
#   ITER_COUNT: number
#   WAIT: time
#   DUMP_FILENAME: string
#   LEADERBOARD_URL: url string
env:
  # How many pages of the leaderboard to scrape
  ITER_COUNT: 300
  # How long to wait between subsequent cURL requests
  # >>> Don't set this too low or you'll trip Cloudflare's ratelimit
  # Don't set it too high either, or the job may exceed GitHub's Actions
  # runner time limit, or overlap/duplicate the next scheduled run
  WAIT: 30s
  # Set the resulting file name here
  DUMP_FILENAME: "dump.json"
  # Replace the URL here with the URL from the command `!levels`
  LEADERBOARD_URL: "https://mee6.xyz/api/plugins/levels/leaderboard/390342113042366465"

jobs:
  fetch-and-dump:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
      # Start from a clean slate; `|| echo` keeps the step green on first run
      - name: Remove previous dump
        run: "rm ${{ env.DUMP_FILENAME }} || echo 'No file to remove'"
      # Append each leaderboard page's JSON to the dump file, pausing
      # between requests to stay under Cloudflare's rate limit
      - name: Fetch level dump
        run: |
          for ((i = 0; i < ${{ env.ITER_COUNT }}; i++)); do
            curl "${{ env.LEADERBOARD_URL }}?page=$i" >> "${{ env.DUMP_FILENAME }}"
            sleep ${{ env.WAIT }}
          done
      - name: Commit and push changes
        run: |
          git config user.name "GitHub Actions"
          # NOTE(review): address appears redacted by the page scrape — restore the real bot email
          git config user.email "[email protected]"
          git add "${{ env.DUMP_FILENAME }}"
          git commit -m "Update ${{ env.DUMP_FILENAME }}" || echo "Source is same as repo"
          git push