Compare commits
327 commits
main...fix-rateli
SHA1:
8c04bbcfdd, 0e3e029360, f2daf93d87, 8ba75e0870, 7a12af0038, 2dd658ef90,
fe986e6b88, 87f545da4a, 098695a5c5, 8a8115bdab, bfeb949450, 2904a54978,
2c417530c7, 30e10ceaed, 41a2c014d8, 92f2b18d19, 828e17cec5, 26138d0bd1,
7367547673, 7354b4ad5d, 63c51c160f, c651285aec, 37f05c8e1f, 012b13ea08,
97f29e6185, 4dba7a4324, b417f435db, 4871af3a0f, 47c2ea449c, 855e0c762c,
078c241b88, 1ed81a15be, 04a772fe9b, 12b4e34e37, b5354e5f99, 05b5de58c1,
0087d98f40, 6b854cdd9d, 7b55088dfa, 4f9c6fd5ef, ba0716e895, 1b814a01f3,
2aecdd7d1f, 105d82db10, da879a83c3, 1c79b6ed50, a52bbea48d, 3e8e45d323,
d3c7302af2, 89bc612820, 2415fa6b5c, 0726da2792, 68fe1f82ec, 43bc6dabce,
d8cbf4f473, 88e01fd811, 4bfee9c258, 6174d7e7e4, 66c7003326, 159a229350,
1fb546f8ae, 080890b60c, 9a419ff36e, c877d579f2, 75b33e0d2f, 59142549f9,
f353adfcc0, 6818984a79, d4f9446503, 194e624cdf, c426f2afb9, 85a91034db,
93ce7fab87, 44b0a265ec, 43c5dc5eaa, 7333694152, 6ccf3592d3, 289e662a63,
52cf1bf861, baf73bf0e5, daa6b5945e, 54a390c21b, 33755bf5c9, 14c9947161,
7a7d790a67, 6fb9657b52, 2128e27005, f5b014ddc6, 9f0e36d20c, edd2cff35d,
c6bfb7dbe1, b618e68bea, 3eaebb710b, e9043aa153, f3ce47fdce, b76da67c38,
c4b77d446a, 825d95c686, c3403d256c, 553dfa3ec4, 03e5b90ddd, 48678203f7,
74ef5d14c3, 9da26905f8, ee52157c74, ad706af980, a295390bb7, 292672b0ef,
98e98103c4, 6505d54550, 044576f990, b306f02fef, 2f794e45da, 2a66cc7a9f,
9105f91297, fb6d13035f, 2ad3844571, a5eafdb285, 507d52091f, 08897f632a,
de028a7cc6, 4f4bd6dd1b, c91f0db114, 0ec10faa31, 4e7bfdb738, c760ed64a6,
6e67ad4e11, eaa233b93a, ab0f0b5b1f, 47aa25550d, 830211562d, 84b22d22d1,
de8f88abff, 0934deae23, 29580e0981, 51fe0e6566, f1bb141b20, 7ce195fe66,
922de770b3, 786877cad7, 9824f73251, 1e53994065, 02d257b72d, 8936b856ed,
7e55406c4c, 7d5aa6a16b, 8e80e025cf, 66148d15d7, d1e2132da2, b5fc1d61bb,
0403b1b732, 899eb52ec4, 0fef1b9e2a, d12770f5ab, 3151069086, d56cecd27b,
7a3ea5b14c, fba687739e, 743b49244f, 96d92ae440, 70cf49e411, 1cc4fd5a11,
ae09f35c1c, 1418ba5287, 152ab1e4c1, 3486c5de54, 364ec7c1a9, e9871cbe91,
b679d6e13f, ff9f5c78e8, 471dfbe09b, 09c7aa1a75, 1130735875, 6b827995fd,
75e09d2c3e, 84d6339b06, f7cc418588, f7b09cf139, 7cdfa0e2c7, d5c0cd4571,
c39ffe7ef1, f0107b42a6, 7f9438fbeb, 28d4f1c6ff, 64691ab82d, e9da691221,
b6a594ae4d, de5aa12918, a7898111c7, 7e2ac71268, e606512be4, 31e2616da5,
3f0e1e3ba6, a5c6a7afb0, 31248ae89e, 9ed88da140, edce34d6fb, ac0a9d4dac,
4bd2199acc, 788a54eed4, 0fc44d65f4, 8f0faca934, cb2f183a04, 99a9f54a9c,
347094502f, 1eaa0245fc, ff31e08d31, f41807911b, 8cd04b1715, 9548626609,
1023bfc72f, 17faac3c20, b7f82aa3b9, 7c9b7e9677, 33d32dfa9a, 620987f6d2,
41d67716e2, 0c90b718b6, a476ee630f, 7c9ec3ff9b, 416312d9c3, 09d78b8352,
0ae198ba47, c4a2e1a541, ba9c20a022, 7068a579f3, 90497791da, f2a9d54ced,
28c58f6940, e989d4cc9c, 77089dab59, 4642f3d06d, 0ed34062d9, aadbab1fb9,
94b3c86f28, f74832d10b, 015b682127, aca34d6d9e, a7afc2fce6, 981dcb5e9b,
bce1408b6d, b862a7d7cf, 885adbe06f, a7591f19c0, 2fef86d189, 9522f4ba83,
904a2c7c8e, 5929f400c0, 74a6904066, d55bf72bd2, b6ede2e01f, 624bc0726d,
2dfa33c6bc, f5e1856086, 2667f17977, 9af389645f, b77d737d07, e1a5df813c,
6ddfdfd016, b72381e3a0, d89fee6bd8, 5fe2c9f229, d923376ef5, 02aa2afa98,
bc572cc62c, ed0fb4190e, adf551e0eb, 5588aa4722, e79d3612ff, 86c21aa934,
4d873a0e82, 461b54d859, 1429c7c098, 8ed7e2687e, 7c4e110dd5, f6de67c1ff,
756aa00496, aa68a0fbe6, 7998c26dcf, ac214458ea, 131849abfd, 4e039ed785,
928115c8c1, d21f7d6ebe, 865b153e97, 04fc52073f, ecf3ad0ac4, 40dd72e883,
5aab7b1710, a2c4e4991f, 14a1b83df7, 6fa52c8b32, e1fae5d0e5, ad023d45ac,
da1ba10101, df690ded0e, fae5322fc9, c420a44221, ee256e59d5, fc638dd47d,
cd9e02f393, a6ea927ca0, 63526b0912, 7375fd87dd, 8ce3914e55, 8c431d01bd,
e0933a70dc, 75ec0d61c8, e796bc01c4, b257e7f276, 628f852309, 77359ddf7c,
ca7c368f12, 805b0dd302, 84158b4a5e, a4282eae48, 07651cfb4f, 61b0168901,
061078fcfd, f6d6deed5c, f568ed2343, d1cc3359a0, bf3da569ca, b81efd7f7d,
d7283b3505, e8138dd5b1, 6f0f9d08b1
70 changed files with 3797 additions and 1121 deletions
14 .github/workflows/bundle_cron.yml vendored

@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2019 Michael Schroeder
#
# SPDX-License-Identifier: MIT

name: Update Bundles

on:

@@ -29,18 +33,14 @@ jobs:
- 6379/tcp
options: --entrypoint redis-server
steps:
- name: Dump GitHub context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- name: Set up Python 3.9
uses: actions/setup-python@v1
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Versions
run: |
python3 --version
- uses: actions/checkout@v1
- uses: actions/checkout@v3
with:
submodules: true
- name: Install deps

@@ -52,5 +52,7 @@ jobs:
ADABOT_GITHUB_USER: ${{ secrets.ADABOT_GITHUB_USER }}
ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }}
REDIS_PORT: ${{ job.services.redis.ports[6379] }}
BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }}
BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }}
run: |
python3 -u -m adabot.circuitpython_bundle

41 .github/workflows/learn_cron.yml vendored Normal file

@@ -0,0 +1,41 @@
# SPDX-FileCopyrightText: 2021 Jeff Epler for Adafruit Industries
#
# SPDX-License-Identifier: MIT

name: Tag Learning System Guides

on:
schedule:
- cron: 0 5 * * *

jobs:
check-repo-owner:
# This job is so the entire workflow will end successfully and give some
# output to explain why it hasn't run on a non-Adafruit fork.
runs-on: ubuntu-latest
if: ${{ (github.repository_owner != 'adafruit') }}
steps:
- run: |
echo "This workflow is only intended to run in the adafruit fork of adabot"

update-learn:
runs-on: ubuntu-latest
# Only run the build if the access token has been configured. THs will be
# the case on Adafruit's repository. Its necessary to do this here, since
# 'schedule' events cannot (currently) be limited (they run on all forks'
# default branches).
if: ${{ (github.repository_owner == 'adafruit') }}
steps:
- uses: actions/checkout@v3
with:
repository: ${{ github.repository_owner }}/Adafruit_Learning_System_Guides
token: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }}
- name: Tag a release
env:
ADABOT_EMAIL: ${{ secrets.ADABOT_EMAIL }}
run: |
git config --global user.name adabot
git config --global user.email "$ADABOT_EMAIL"
TAG_NAME=`date +%Y%m%d`
git tag $TAG_NAME
git push origin $TAG_NAME

12 .github/workflows/pre-commit.yml vendored

@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2021 Michael Schroeder
#
# SPDX-License-Identifier: MIT

name: Run pre-commit

on: [pull_request, push]

@@ -6,19 +10,15 @@ jobs:
pre-commit:
runs-on: ubuntu-latest
steps:
- name: Dump GitHub context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- name: Set up Python 3.9
uses: actions/setup-python@v1
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Versions
run: |
python3 --version
- name: Checkout Current Repo
uses: actions/checkout@v1
uses: actions/checkout@v3
- name: Pip install requirements
run: |
pip install --force-reinstall -r requirements.txt

24 .github/workflows/reports_cron.yml vendored

@@ -1,8 +1,16 @@
# SPDX-FileCopyrightText: 2019 Michael Schroeder
#
# SPDX-License-Identifier: MIT

name: Run Daily Reports

on:
schedule:
# The actor (github.actor) that runs the cron job may be the user who created the cron job
# initially. It does not appear to be settable via a secret or environment variable.
- cron: 15 5 * * *
workflow_dispatch:


jobs:
check-repo-owner:

@@ -25,19 +33,18 @@ jobs:
env:
ADABOT_GITHUB_USER: ${{ secrets.ADABOT_GITHUB_USER }}
ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }}
RTD_TOKEN: ${{ secrets.RTD_TOKEN }}
BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }}
BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }}
steps:
- name: Dump GitHub context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- name: Set up Python 3.9
uses: actions/setup-python@v1
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Versions
run: |
python3 --version
- uses: actions/checkout@v1
- uses: actions/checkout@v3
with:
submodules: true
- name: Install deps

@@ -48,9 +55,9 @@ jobs:
- name: Set Date Variable
id: today
run: |
echo ::set-output name=date::$(
echo date=$(
date +%Y%m%d
)
) >> $GITHUB_OUTPUT
- name: Run adabot.circuitpython_libraries
env:
# LIB_CHECK_CP_FILE is for circuitpython_libraries.py output

@@ -76,6 +83,7 @@ jobs:
run: |
ls bin/adabot
- name: Upload Reports To AWS S3
if: ${{ github.event_name != 'workflow_dispatch' }}
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

13 .github/workflows/test.yml vendored

@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2021 Michael Schroeder
#
# SPDX-License-Identifier: MIT

name: Test Adabot

on: [push, pull_request]

@@ -27,18 +31,14 @@ jobs:
- 6379/tcp
options: --entrypoint redis-server
steps:
- name: Dump GitHub context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT"
- name: Set up Python 3.9
uses: actions/setup-python@v1
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Versions
run: |
python3 --version
- uses: actions/checkout@v1
- uses: actions/checkout@v3
with:
submodules: true
- name: Install deps

@@ -49,6 +49,7 @@ jobs:
ADABOT_EMAIL: ${{ secrets.ADABOT_EMAIL }}
ADABOT_GITHUB_USER: ${{ secrets.ADABOT_GITHUB_USER }}
ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }}
RTD_TOKEN: ${{ secrets.RTD_TOKEN }}
REDIS_PORT: ${{ job.services.redis.ports[6379] }}
run: |
python3 -u -m pytest

6 .gitignore vendored

@@ -1,11 +1,17 @@
# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
#
# SPDX-License-Identifier: MIT

__pycache__
_build
.bundles/*
*.pyc
.env
.venv
env.sh
*.swp
.libraries/*
.gitlibs/*
.cp_org/*
.blinka/*
.vscode

@@ -1,16 +1,24 @@
# SPDX-FileCopyrightText: 2021 Michael Schroeder
#
# SPDX-License-Identifier: MIT

exclude: patches
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
rev: v4.4.0
hooks:
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/psf/black
rev: 21.6b0
hooks:
- id: black
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/fsfe/reuse-tool
rev: v1.1.2
hooks:
- id: reuse
- repo: https://github.com/psf/black
rev: 23.3.0
hooks:
- id: black
- repo: https://github.com/pycqa/pylint
rev: v2.9.3
rev: v2.17.4
hooks:
- id: pylint
name: pylint

832 .pylintrc

@@ -1,101 +1,472 @@
|
|||
[MASTER]
|
||||
# SPDX-FileCopyrightText: 2017 Scott Shawcroft, written for Adafruit Industries
|
||||
#
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
[MAIN]
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
|
||||
# in a server-like mode.
|
||||
clear-cache-post-run=no
|
||||
|
||||
# Load and enable all available extensions. Use --list-extensions to see a list
|
||||
# all available extensions.
|
||||
#enable-all-extensions=
|
||||
|
||||
# In error mode, messages with a category besides ERROR or FATAL are
|
||||
# suppressed, and no reports are done by default. Error mode is compatible with
|
||||
# disabling specific errors.
|
||||
#errors-only=
|
||||
|
||||
# Always return a 0 (non-error) status code, even if lint errors are found.
|
||||
# This is primarily useful in continuous integration scripts.
|
||||
#exit-zero=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code
|
||||
# run arbitrary code.
|
||||
extension-pkg-allow-list=
|
||||
|
||||
# A comma-separated list of package or module names from where C extensions may
|
||||
# be loaded. Extensions are loading into the active Python interpreter and may
|
||||
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
|
||||
# for backward compatibility.)
|
||||
extension-pkg-whitelist=
|
||||
|
||||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
# Return non-zero exit code if any of these messages/categories are detected,
|
||||
# even if score is above --fail-under value. Syntax same as enable. Messages
|
||||
# specified are enabled, while categories only check already-enabled messages.
|
||||
fail-on=
|
||||
|
||||
# Specify a score threshold under which the program will exit with error.
|
||||
fail-under=10
|
||||
|
||||
# Interpret the stdin as a python script, whose filename needs to be passed as
|
||||
# the module_or_package argument.
|
||||
#from-stdin=
|
||||
|
||||
# Files or directories to be skipped. They should be base names, not paths.
|
||||
ignore=CVS
|
||||
|
||||
# Add files or directories matching the regex patterns to the blacklist. The
|
||||
# regex matches against base names, not paths.
|
||||
ignore-patterns=
|
||||
# Add files or directories matching the regular expressions patterns to the
|
||||
# ignore-list. The regex matches against paths and can be in Posix or Windows
|
||||
# format. Because '\\' represents the directory delimiter on Windows systems,
|
||||
# it can't be used as an escape character.
|
||||
ignore-paths=
|
||||
|
||||
# Files or directories matching the regular expression patterns are skipped.
|
||||
# The regex matches against base names, not paths. The default value ignores
|
||||
# Emacs file locks
|
||||
ignore-patterns=^\.#
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
|
||||
# Python code to execute, usually for sys.path manipulation such as
|
||||
# pygtk.require().
|
||||
#init-hook=
|
||||
|
||||
# Use multiple processes to speed up Pylint.
|
||||
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||
# number of processors available to use, and will cap the count on Windows to
|
||||
# avoid hangs.
|
||||
jobs=1
|
||||
|
||||
# List of plugins (as comma separated values of python modules names) to load,
|
||||
# Control the amount of potential inferred values when inferring a single
|
||||
# object. This can help the performance when dealing with large functions or
|
||||
# complex, nested conditions.
|
||||
limit-inference-results=100
|
||||
|
||||
# List of plugins (as comma separated values of python module names) to load,
|
||||
# usually to register additional checkers.
|
||||
load-plugins=
|
||||
load-plugins=pylint.extensions.no_self_use
|
||||
|
||||
# Pickle collected data for later comparisons.
|
||||
persistent=yes
|
||||
|
||||
# Specify a configuration file.
|
||||
#rcfile=
|
||||
# Minimum Python version to use for version dependent checks. Will default to
|
||||
# the version used to run pylint.
|
||||
py-version=3.11
|
||||
|
||||
# Discover python modules and packages in the file system subtree.
|
||||
recursive=no
|
||||
|
||||
# Add paths to the list of the source roots. Supports globbing patterns. The
|
||||
# source root is an absolute path or a path relative to the current working
|
||||
# directory used to determine a package namespace for modules located under the
|
||||
# source root.
|
||||
source-roots=
|
||||
|
||||
# When enabled, pylint would attempt to guess common misconfiguration and emit
|
||||
# user-friendly hints instead of false-positive error messages.
|
||||
suggestion-mode=yes
|
||||
|
||||
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||
# active Python interpreter and may run arbitrary code.
|
||||
unsafe-load-any-extension=no
|
||||
|
||||
# In verbose mode, extra non-checker-related info will be displayed.
|
||||
#verbose=
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Naming style matching correct argument names.
|
||||
argument-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct argument names. Overrides argument-
|
||||
# naming-style. If left empty, argument names will be checked with the set
|
||||
# naming style.
|
||||
#argument-rgx=
|
||||
|
||||
# Naming style matching correct attribute names.
|
||||
attr-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||
# style. If left empty, attribute names will be checked with the set naming
|
||||
# style.
|
||||
#attr-rgx=
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma.
|
||||
bad-names=foo,
|
||||
bar,
|
||||
baz,
|
||||
toto,
|
||||
tutu,
|
||||
tata
|
||||
|
||||
# Bad variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be refused
|
||||
bad-names-rgxs=
|
||||
|
||||
# Naming style matching correct class attribute names.
|
||||
class-attribute-naming-style=any
|
||||
|
||||
# Regular expression matching correct class attribute names. Overrides class-
|
||||
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||
# with the set naming style.
|
||||
#class-attribute-rgx=
|
||||
|
||||
# Naming style matching correct class constant names.
|
||||
class-const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct class constant names. Overrides class-
|
||||
# const-naming-style. If left empty, class constant names will be checked with
|
||||
# the set naming style.
|
||||
#class-const-rgx=
|
||||
|
||||
# Naming style matching correct class names.
|
||||
class-naming-style=PascalCase
|
||||
|
||||
# Regular expression matching correct class names. Overrides class-naming-
|
||||
# style. If left empty, class names will be checked with the set naming style.
|
||||
#class-rgx=
|
||||
|
||||
# Naming style matching correct constant names.
|
||||
const-naming-style=UPPER_CASE
|
||||
|
||||
# Regular expression matching correct constant names. Overrides const-naming-
|
||||
# style. If left empty, constant names will be checked with the set naming
|
||||
# style.
|
||||
#const-rgx=
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
# Naming style matching correct function names.
|
||||
function-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct function names. Overrides function-
|
||||
# naming-style. If left empty, function names will be checked with the set
|
||||
# naming style.
|
||||
#function-rgx=
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma.
|
||||
good-names=i,
|
||||
j,
|
||||
k,
|
||||
ex,
|
||||
Run,
|
||||
_
|
||||
|
||||
# Good variable names regexes, separated by a comma. If names match any regex,
|
||||
# they will always be accepted
|
||||
good-names-rgxs=
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name.
|
||||
include-naming-hint=no
|
||||
|
||||
# Naming style matching correct inline iteration names.
|
||||
inlinevar-naming-style=any
|
||||
|
||||
# Regular expression matching correct inline iteration names. Overrides
|
||||
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||
# with the set naming style.
|
||||
#inlinevar-rgx=
|
||||
|
||||
# Naming style matching correct method names.
|
||||
method-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct method names. Overrides method-naming-
|
||||
# style. If left empty, method names will be checked with the set naming style.
|
||||
#method-rgx=
|
||||
|
||||
# Naming style matching correct module names.
|
||||
module-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct module names. Overrides module-naming-
|
||||
# style. If left empty, module names will be checked with the set naming style.
|
||||
#module-rgx=
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=^_
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
# These decorators are taken in consideration only for invalid-name.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Regular expression matching correct type alias names. If left empty, type
|
||||
# alias names will be checked with the set naming style.
|
||||
#typealias-rgx=
|
||||
|
||||
# Regular expression matching correct type variable names. If left empty, type
|
||||
# variable names will be checked with the set naming style.
|
||||
#typevar-rgx=
|
||||
|
||||
# Naming style matching correct variable names.
|
||||
variable-naming-style=snake_case
|
||||
|
||||
# Regular expression matching correct variable names. Overrides variable-
|
||||
# naming-style. If left empty, variable names will be checked with the set
|
||||
# naming style.
|
||||
#variable-rgx=
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# Warn about protected attribute access inside special methods
|
||||
check-protected-access-in-special-methods=no
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,
|
||||
__new__,
|
||||
setUp,
|
||||
asyncSetUp,
|
||||
__post_init__
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# List of regular expressions of class ancestor names to ignore when counting
|
||||
# public methods (see R0903)
|
||||
exclude-too-few-public-methods=
|
||||
|
||||
# List of qualified class names to ignore when counting class parents (see
|
||||
# R0901)
|
||||
ignored-parents=
|
||||
|
||||
# Maximum number of arguments for function / method.
|
||||
max-args=5
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
max-attributes=7
|
||||
|
||||
# Maximum number of boolean expressions in an if statement (see R0916).
|
||||
max-bool-expr=5
|
||||
|
||||
# Maximum number of branch for function / method body.
|
||||
max-branches=12
|
||||
|
||||
# Maximum number of locals for function / method body.
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=7
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=20
|
||||
|
||||
# Maximum number of return / yield for function / method body.
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of statements in function / method body.
|
||||
max-statements=50
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=2
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when caught.
|
||||
overgeneral-exceptions=builtins.BaseException,builtins.Exception
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module.
|
||||
max-module-lines=1000
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# List of modules that can be imported at any level, not just the top level
|
||||
# one.
|
||||
allow-any-import-level=
|
||||
|
||||
# Allow explicit reexports by alias from a package __init__.
|
||||
allow-reexport-from-package=no
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma.
|
||||
deprecated-modules=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of external dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
ext-import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of all (i.e. internal and
|
||||
# external) dependencies to the given file (report RP0402 must not be
|
||||
# disabled).
|
||||
import-graph=
|
||||
|
||||
# Output a graph (.gv or any supported image format) of internal dependencies
|
||||
# to the given file (report RP0402 must not be disabled).
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
# Couples of modules and preferred modules, separated by a comma.
|
||||
preferred-modules=
|
||||
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# The type of string formatting that logging methods do. `old` means using %
|
||||
# formatting, `new` is for `{}` formatting.
|
||||
logging-format-style=old
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format.
|
||||
logging-modules=logging
|
||||
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
|
||||
confidence=
|
||||
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
|
||||
# UNDEFINED.
|
||||
confidence=HIGH,
|
||||
CONTROL_FLOW,
|
||||
INFERENCE,
|
||||
INFERENCE_FAILURE,
|
||||
UNDEFINED
|
||||
|
||||
# Disable the message, report, category or checker with the given id(s). You
|
||||
# can either give multiple identifiers separated by comma (,) or put this
|
||||
# option multiple times (only on the command line, not in the configuration
|
||||
# file where it should appear only once).You can also use "--disable=all" to
|
||||
# disable everything first and then reenable specific checks. For example, if
|
||||
# file where it should appear only once). You can also use "--disable=all" to
|
||||
# disable everything first and then re-enable specific checks. For example, if
|
||||
# you want to run only the similarities checker, you can use "--disable=all
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
disable=print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,
|
||||
long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,
|
||||
raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,
|
||||
suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,
|
||||
buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,
|
||||
raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,
|
||||
coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,
|
||||
old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,
|
||||
indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,
|
||||
cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,
|
||||
map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,
|
||||
filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,
|
||||
rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,bad-python3-import,
|
||||
deprecated-string-function,deprecated-str-translate-call,import-error,bad-continuation,
|
||||
subprocess-run-check,too-many-lines
|
||||
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||
# --disable=W".
|
||||
disable=raw-checker-failed,
|
||||
bad-inline-option,
|
||||
locally-disabled,
|
||||
file-ignored,
|
||||
suppressed-message,
|
||||
useless-suppression,
|
||||
deprecated-pragma,
|
||||
use-symbolic-message-instead,
|
||||
import-error,
|
||||
consider-using-f-string, # to-fix
|
||||
unspecified-encoding, # to-fix
|
||||
consider-using-generator, # to-fix
|
||||
fixme, # to-fix
|
||||
use-dict-literal, # to-fix
|
||||
invalid-name, # to-fix
|
||||
superfluous-parens, # to-fix
|
||||
duplicate-code,
|
||||
redefined-outer-name, # to-fix
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
# multiple time (only on the command line, not in the configuration file where
|
||||
# it should appear only once). See also the "--disable" option for examples.
|
||||
enable=
|
||||
enable=c-extension-no-member
|
||||
|
||||
|
||||
[REPORTS]
|
||||
[METHOD_ARGS]
|
||||
|
||||
# Python expression which should return a note less than 10 (10 is the highest
|
||||
# note). You have access to the variables errors warning, statement which
|
||||
# respectively contain the number of errors / warnings messages and the total
|
||||
# number of statements analyzed. This is used by the global evaluation report
|
||||
# (RP0004).
|
||||
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
# List of qualified names (i.e., library.method) which require a timeout
|
||||
# parameter e.g. 'requests.api.get,requests.api.post'
|
||||
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details
|
||||
#msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio).You can also give a reporter class, eg
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=text
|
||||
[MISCELLANEOUS]
|
||||
|
||||
# Tells whether to display a full report or only the messages
|
||||
reports=no
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
notes=FIXME,
|
||||
XXX,
|
||||
TODO
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
# Regular expression of note tags to take in consideration.
|
||||
notes-rgx=
|
||||
|
||||
|
||||
[REFACTORING]
|
||||
|
|
@@ -103,36 +474,89 @@ score=yes
|
|||
# Maximum number of nested blocks for function / method body
|
||||
max-nested-blocks=5
|
||||
|
||||
# Complete name of functions that never returns. When checking for
|
||||
# inconsistent-return-statements if a never returning function is called then
|
||||
# it will be considered as an explicit return statement and no message will be
|
||||
# printed.
|
||||
never-returning-functions=sys.exit,argparse.parse_error
|
||||
|
||||
[LOGGING]
|
||||
|
||||
# Logging modules to check that the string format arguments are in logging
|
||||
# function parameter format
|
||||
logging-modules=logging
|
||||
[REPORTS]
|
||||
|
||||
# Python expression which should return a score less than or equal to 10. You
|
||||
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
|
||||
# 'convention', and 'info' which contain the number of messages in each
|
||||
# category, as well as 'statement' which is the total number of statements
|
||||
# analyzed. This score is used by the global evaluation report (RP0004).
|
||||
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
|
||||
|
||||
# Template used to display messages. This is a python new-style format string
|
||||
# used to format the message information. See doc for all details.
|
||||
msg-template=
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, json
|
||||
# and msvs (visual studio). You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
#output-format=
|
||||
|
||||
# Tells whether to display a full report or only the messages.
|
||||
reports=no
|
||||
|
||||
# Activate the evaluation score.
|
||||
score=yes
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Comments are removed from the similarity computation
|
||||
ignore-comments=yes
|
||||
|
||||
# Docstrings are removed from the similarity computation
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Imports are removed from the similarity computation
|
||||
ignore-imports=yes
|
||||
|
||||
# Signatures are removed from the similarity computation
|
||||
ignore-signatures=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=4
|
||||
|
||||
|
||||
[SPELLING]
|
||||
|
||||
# Spelling dictionary name. Available dictionaries: none. To make it working
|
||||
# install python-enchant package.
|
||||
# Limits count of emitted suggestions for spelling mistakes.
|
||||
max-spelling-suggestions=4
|
||||
|
||||
# Spelling dictionary name. No available dictionaries : You need to install
|
||||
# both the python package and the system dependency for enchant to work..
|
||||
spelling-dict=
|
||||
|
||||
# List of comma separated words that should be considered directives if they
|
||||
# appear at the beginning of a comment and should not be checked.
|
||||
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
|
||||
|
||||
# List of comma separated words that should not be checked.
|
||||
spelling-ignore-words=
|
||||
|
||||
# A path to a file that contains private dictionary; one word per line.
|
||||
# A path to a file that contains the private dictionary; one word per line.
|
||||
spelling-private-dict-file=
|
||||
|
||||
# Tells whether to store unknown words to indicated private dictionary in
|
||||
# --spelling-private-dict-file option instead of raising a message.
|
||||
# Tells whether to store unknown words to the private dictionary (see the
|
||||
# --spelling-private-dict-file option) instead of raising a message.
|
||||
spelling-store-unknown-words=no
|
||||
|
||||
|
||||
[MISCELLANEOUS]
|
||||
[STRING]
|
||||
|
||||
# List of note tags to take in consideration, separated by a comma.
|
||||
# notes=FIXME,XXX,TODO
|
||||
notes=FIXME,XXX
|
||||
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||
# character used as a quote delimiter is used inconsistently within a module.
|
||||
check-quote-consistency=no
|
||||
|
||||
# This flag controls whether the implicit-str-concat should generate a warning
|
||||
# on implicit string concatenation in sequences defined over several lines.
|
||||
check-str-concat-over-line-jumps=no
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
|
|
@@ -145,11 +569,11 @@ contextmanager-decorators=contextlib.contextmanager
|
|||
# List of members which are set dynamically and missed by pylint inference
|
||||
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||
# expressions are accepted.
|
||||
generated-members=sh.ErrorReturnCode_1,sh.ErrorReturnCode_128
|
||||
generated-members=
|
||||
|
||||
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
ignore-mixin-members=yes
|
||||
# Tells whether to warn about missing members when the owner of the attribute
|
||||
# is inferred to be None.
|
||||
ignore-none=yes
|
||||
|
||||
# This flag controls whether pylint should warn about no-member and similar
|
||||
# checks whenever an opaque object is returned when inferring. The inference
|
||||
|
|
@@ -159,16 +583,16 @@ ignore-mixin-members=yes
|
|||
# the rest of the inferred objects.
|
||||
ignore-on-opaque-inference=yes
|
||||
|
||||
# List of symbolic message names to ignore for Mixin members.
|
||||
ignored-checks-for-mixins=no-member,
|
||||
not-async-context-manager,
|
||||
not-context-manager,
|
||||
attribute-defined-outside-init
|
||||
|
||||
# List of class names for which member attributes should not be checked (useful
|
||||
# for classes with dynamically set attributes). This supports the use of
|
||||
# qualified names.
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local
|
||||
|
||||
# List of module names for which member attributes should not be checked
|
||||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis. It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
|
|
@@ -182,26 +606,35 @@ missing-member-hint-distance=1
|
|||
# showing a hint for a missing member.
|
||||
missing-member-max-choices=1
|
||||
|
||||
# Regex pattern to define which classes are considered mixins.
|
||||
mixin-class-rgx=.*[Mm]ixin
|
||||
|
||||
# List of decorators that change the signature of a decorated function.
|
||||
signature-mutators=
|
||||
|
||||
|
||||
[VARIABLES]
|
||||
|
||||
# List of additional names supposed to be defined in builtins. Remember that
|
||||
# you should avoid to define new builtins when possible.
|
||||
# you should avoid defining new builtins when possible.
|
||||
additional-builtins=
|
||||
|
||||
# Tells whether unused global variables should be treated as a violation.
|
||||
allow-global-unused-variables=yes
|
||||
|
||||
# List of names allowed to shadow builtins
|
||||
allowed-redefined-builtins=
|
||||
|
||||
# List of strings which can identify a callback function by name. A callback
|
||||
# name must start or end with one of those strings.
|
||||
callbacks=cb_,_cb
|
||||
callbacks=cb_,
|
||||
_cb
|
||||
|
||||
# A regular expression matching the name of dummy variables (i.e. expectedly
|
||||
# not used).
|
||||
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||
# not be used).
|
||||
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||
|
||||
# Argument names that match this expression will be ignored. Default to name
|
||||
# with leading underscore
|
||||
# Argument names that match this expression will be ignored.
|
||||
ignored-argument-names=_.*|^ignored_|^unused_
|
||||
|
||||
# Tells whether we should check for unused import in __init__ files.
|
||||
|
|
@@ -209,237 +642,4 @@ init-import=no
|
|||
|
||||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,future.builtins
|
||||
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
# expected-line-ending-format=
|
||||
expected-line-ending-format=LF
|
||||
|
||||
# Regexp for a line that is allowed to be longer than the limit.
|
||||
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
|
||||
# Number of spaces of indent required inside a hanging or continued line.
|
||||
indent-after-paren=4
|
||||
|
||||
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
# tab).
|
||||
indent-string=' '
|
||||
|
||||
# Maximum number of characters on a single line.
|
||||
max-line-length=100
|
||||
|
||||
# Maximum number of lines in a module
|
||||
max-module-lines=1000
|
||||
|
||||
# List of optional constructs for which whitespace checking is disabled. `dict-
|
||||
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
|
||||
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
|
||||
# `empty-line` allows space-only lines.
|
||||
no-space-check=trailing-comma,dict-separator
|
||||
|
||||
# Allow the body of a class to be on the same line as the declaration if body
|
||||
# contains single statement.
|
||||
single-line-class-stmt=no
|
||||
|
||||
# Allow the body of an if to be on the same line as the test if there is no
|
||||
# else.
|
||||
single-line-if-stmt=no
|
||||
|
||||
|
||||
[SIMILARITIES]
|
||||
|
||||
# Ignore comments when computing similarities.
|
||||
ignore-comments=yes
|
||||
|
||||
# Ignore docstrings when computing similarities.
|
||||
ignore-docstrings=yes
|
||||
|
||||
# Ignore imports when computing similarities.
|
||||
ignore-imports=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
min-similarity-lines=50
|
||||
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Naming hint for argument names
|
||||
argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
|
||||
|
||||
# Regular expression matching correct argument names
|
||||
argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
|
||||
|
||||
# Naming hint for attribute names
|
||||
attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
|
||||
|
||||
# Regular expression matching correct attribute names
|
||||
attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
|
||||
|
||||
# Bad variable names which should always be refused, separated by a comma
|
||||
bad-names=foo,bar,baz,toto,tutu,tata
|
||||
|
||||
# Naming hint for class attribute names
|
||||
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
|
||||
# Regular expression matching correct class attribute names
|
||||
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
|
||||
# Naming hint for class names
|
||||
# class-name-hint=[A-Z_][a-zA-Z0-9]+$
|
||||
class-name-hint=[A-Z_][a-zA-Z0-9_]+$
|
||||
|
||||
# Regular expression matching correct class names
|
||||
# class-rgx=[A-Z_][a-zA-Z0-9]+$
|
||||
class-rgx=[A-Z_][a-zA-Z0-9_]+$
|
||||
|
||||
# Naming hint for constant names
|
||||
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||
|
||||
# Regular expression matching correct constant names
|
||||
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||
|
||||
# Minimum line length for functions/classes that require docstrings, shorter
|
||||
# ones are exempt.
|
||||
docstring-min-length=-1
|
||||
|
||||
# Naming hint for function names
|
||||
function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
|
||||
|
||||
# Regular expression matching correct function names
|
||||
function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma
|
||||
# good-names=i,j,k,ex,Run,_
|
||||
good-names=r,g,b,w,i,j,k,n,x,y,z,ex,ok,Run,_
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name
|
||||
include-naming-hint=no
|
||||
|
||||
# Naming hint for inline iteration names
|
||||
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Regular expression matching correct inline iteration names
|
||||
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
||||
|
||||
# Naming hint for method names
|
||||
method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
|
||||
|
||||
# Regular expression matching correct method names
|
||||
method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
|
||||
|
||||
# Naming hint for module names
|
||||
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Regular expression matching correct module names
|
||||
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
|
||||
# Colon-delimited sets of names that determine each other's naming style when
|
||||
# the name regexes allow several styles.
|
||||
name-group=
|
||||
|
||||
# Regular expression which should only match function or class names that do
|
||||
# not require a docstring.
|
||||
no-docstring-rgx=^_
|
||||
|
||||
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||
# to this list to register other decorators that produce valid properties.
|
||||
property-classes=abc.abstractproperty
|
||||
|
||||
# Naming hint for variable names
|
||||
variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
|
||||
|
||||
# Regular expression matching correct variable names
|
||||
variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
|
||||
|
||||
|
||||
[IMPORTS]
|
||||
|
||||
# Allow wildcard imports from modules that define __all__.
|
||||
allow-wildcard-with-all=no
|
||||
|
||||
# Analyse import fallback blocks. This can be used to support both Python 2 and
|
||||
# 3 compatible code, which means that the block might have code that exists
|
||||
# only in one or another interpreter, leading to false positives when analysed.
|
||||
analyse-fallback-blocks=no
|
||||
|
||||
# Deprecated modules which should not be used, separated by a comma
|
||||
deprecated-modules=optparse,tkinter.tix
|
||||
|
||||
# Create a graph of external dependencies in the given file (report RP0402 must
|
||||
# not be disabled)
|
||||
ext-import-graph=
|
||||
|
||||
# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
# given file (report RP0402 must not be disabled)
|
||||
import-graph=
|
||||
|
||||
# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||
# not be disabled)
|
||||
int-import-graph=
|
||||
|
||||
# Force import order to recognize a module as part of the standard
|
||||
# compatibility libraries.
|
||||
known-standard-library=
|
||||
|
||||
# Force import order to recognize a module as part of a third party library.
|
||||
known-third-party=enchant
|
||||
|
||||
|
||||
[CLASSES]
|
||||
|
||||
# List of method names used to declare (i.e. assign) instance attributes.
|
||||
defining-attr-methods=__init__,__new__,setUp
|
||||
|
||||
# List of member names, which should be excluded from the protected access
|
||||
# warning.
|
||||
exclude-protected=_asdict,_fields,_replace,_source,_make
|
||||
|
||||
# List of valid names for the first argument in a class method.
|
||||
valid-classmethod-first-arg=cls
|
||||
|
||||
# List of valid names for the first argument in a metaclass class method.
|
||||
valid-metaclass-classmethod-first-arg=mcs
|
||||
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# Maximum number of arguments for function / method
|
||||
max-args=5
|
||||
|
||||
# Maximum number of attributes for a class (see R0902).
|
||||
# max-attributes=7
|
||||
max-attributes=11
|
||||
|
||||
# Maximum number of boolean expressions in a if statement
|
||||
max-bool-expr=5
|
||||
|
||||
# Maximum number of branch for function / method body
|
||||
max-branches=12
|
||||
|
||||
# Maximum number of locals for function / method body
|
||||
max-locals=15
|
||||
|
||||
# Maximum number of parents for a class (see R0901).
|
||||
max-parents=7
|
||||
|
||||
# Maximum number of public methods for a class (see R0904).
|
||||
max-public-methods=20
|
||||
|
||||
# Maximum number of return / yield for function / method body
|
||||
max-returns=6
|
||||
|
||||
# Maximum number of statements in function / method body
|
||||
max-statements=50
|
||||
|
||||
# Minimum number of public methods for a class (see R0903).
|
||||
min-public-methods=1
|
||||
|
||||
|
||||
[EXCEPTIONS]
|
||||
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
# "Exception"
|
||||
overgeneral-exceptions=Exception
|
||||
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||
|
|
|
|||
7 .reuse/dep5 Normal file

@@ -0,0 +1,7 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: adabot
Source: https://github.com/adafruit/adabot.git

Files: patches/*
Copyright: 2022 Kattni Rembor
License: MIT

3 CODE_OF_CONDUCT.md.license Normal file

@@ -0,0 +1,3 @@
SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries

SPDX-License-Identifier: MIT

19 LICENSES/MIT.txt Normal file

@@ -0,0 +1,19 @@
MIT License Copyright (c) <year> <copyright holders>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is furnished
to do so, subject to the following conditions:

The above copyright notice and this permission notice (including the next
paragraph) shall be included in all copies or substantial portions of the
Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

20 LICENSES/Unlicense.txt Normal file

@@ -0,0 +1,20 @@
This is free and unencumbered software released into the public domain.

Anyone is free to copy, modify, publish, use, compile, sell, or distribute
this software, either in source code form or as a compiled binary, for any
purpose, commercial or non-commercial, and by any means.

In jurisdictions that recognize copyright laws, the author or authors of this
software dedicate any and all copyright interest in the software to the public
domain. We make this dedication for the benefit of the public at large and
to the detriment of our heirs and successors. We intend this dedication to
be an overt act of relinquishment in perpetuity of all present and future
rights to this software under copyright law.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH
THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. For more information,
please refer to <https://unlicense.org/>

README.rst.license (new file, +3)
@@ -0,0 +1,3 @@
SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries

SPDX-License-Identifier: MIT

@@ -1,5 +1,9 @@
#!/bin/bash

# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
#
# SPDX-License-Identifier: MIT

cd /home/tannewt/adabot

source .env/bin/activate
@@ -1,3 +1,7 @@
# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
#
# SPDX-License-Identifier: MIT

# Screen startup file to start multiple commands under multiple screens.
# Start with "screen -c thisfilename"
@@ -1,24 +1,6 @@
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2018 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2018 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Adabot utility for Arduino Libraries."""
|
||||
|
||||
|
|
@ -29,7 +11,7 @@ import traceback
|
|||
|
||||
import requests
|
||||
|
||||
from adabot import github_requests as github
|
||||
from adabot import github_requests as gh_reqs
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
ch = logging.StreamHandler(stream=sys.stdout)
|
||||
|
|
@ -68,7 +50,7 @@ def list_repos():
|
|||
repository state.
|
||||
"""
|
||||
repos = []
|
||||
result = github.get(
|
||||
result = gh_reqs.get(
|
||||
"/search/repositories",
|
||||
params={
|
||||
"q": (
|
||||
|
|
@ -88,7 +70,7 @@ def list_repos():
|
|||
) # uncomment and comment below, to include all forks
|
||||
|
||||
if result.links.get("next"):
|
||||
result = github.get(result.links["next"]["url"])
|
||||
result = gh_reqs.get(result.links["next"]["url"])
|
||||
else:
|
||||
break
|
||||
|
||||
|
|
@ -102,7 +84,9 @@ def is_arduino_library(repo):
|
|||
lib_prop_file = requests.get(
|
||||
"https://raw.githubusercontent.com/adafruit/"
|
||||
+ repo["name"]
|
||||
+ "/master/library.properties"
|
||||
+ "/"
|
||||
+ repo["default_branch"]
|
||||
+ "/library.properties"
|
||||
)
|
||||
return lib_prop_file.ok
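For reference, a minimal sketch of the raw-URL lookup that is_arduino_library() and validate_library_properties() now perform against the repository's default branch instead of a hard-coded master (the repo dict below is invented for illustration):

import requests

repo = {"name": "Adafruit_Example_Lib", "default_branch": "main"}  # hypothetical repo
url = (
    "https://raw.githubusercontent.com/adafruit/"
    + repo["name"]
    + "/"
    + repo["default_branch"]
    + "/library.properties"
)
# A 200 response means the repo ships a library.properties file on its default branch.
print(url, requests.get(url).ok)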
|
||||
|
||||
|
|
@ -130,7 +114,9 @@ def validate_library_properties(repo):
|
|||
lib_prop_file = requests.get(
|
||||
"https://raw.githubusercontent.com/adafruit/"
|
||||
+ repo["name"]
|
||||
+ "/master/library.properties"
|
||||
+ "/"
|
||||
+ repo["default_branch"]
|
||||
+ "/library.properties"
|
||||
)
|
||||
if not lib_prop_file.ok:
|
||||
# print("{} skipped".format(repo["name"]))
|
||||
|
|
@ -142,7 +128,7 @@ def validate_library_properties(repo):
|
|||
lib_version = line[len("version=") :]
|
||||
break
|
||||
|
||||
get_latest_release = github.get(
|
||||
get_latest_release = gh_reqs.get(
|
||||
"/repos/adafruit/" + repo["name"] + "/releases/latest"
|
||||
)
|
||||
if get_latest_release.ok:
|
||||
|
|
@ -169,23 +155,30 @@ def validate_release_state(repo):
|
|||
if not is_arduino_library(repo):
|
||||
return None
|
||||
|
||||
compare_tags = github.get(
|
||||
"/repos/" + repo["full_name"] + "/compare/master..." + repo["tag_name"]
|
||||
compare_tags = gh_reqs.get(
|
||||
"/repos/"
|
||||
+ repo["full_name"]
|
||||
+ "/compare/"
|
||||
+ repo["default_branch"]
|
||||
+ "..."
|
||||
+ repo["tag_name"]
|
||||
)
|
||||
if not compare_tags.ok:
|
||||
logger.error(
|
||||
"Error: failed to compare %s 'master' to tag '%s'",
|
||||
"Error: failed to compare %s '%s' to tag '%s'",
|
||||
repo["name"],
|
||||
repo["default_branch"],
|
||||
repo["tag_name"],
|
||||
)
|
||||
return None
|
||||
compare_tags_json = compare_tags.json()
|
||||
if "status" in compare_tags_json:
|
||||
if compare_tags.json()["status"] != "identical":
|
||||
if compare_tags_json["status"] != "identical":
|
||||
return [repo["tag_name"], compare_tags_json["behind_by"]]
|
||||
elif "errors" in compare_tags_json:
|
||||
logger.error(
|
||||
"Error: comparing latest release to 'master' failed on '%s'. Error Message: %s",
|
||||
"Error: comparing latest release to '%s' failed on '%s'. Error Message: %s",
|
||||
repo["default_branch"],
|
||||
repo["name"],
|
||||
compare_tags_json["message"],
|
||||
)
|
||||
|
|
@ -198,14 +191,16 @@ def validate_actions(repo):
|
|||
repo_has_actions = requests.get(
|
||||
"https://raw.githubusercontent.com/adafruit/"
|
||||
+ repo["name"]
|
||||
+ "/master/.github/workflows/githubci.yml"
|
||||
+ "/"
|
||||
+ repo["default_branch"]
|
||||
+ "/.github/workflows/githubci.yml"
|
||||
)
|
||||
return repo_has_actions.ok
|
||||
|
||||
|
||||
def validate_example(repo):
|
||||
"""Validate if a repo has any files in examples directory"""
|
||||
repo_has_ino = github.get("/repos/adafruit/" + repo["name"] + "/contents/examples")
|
||||
repo_has_ino = gh_reqs.get("/repos/adafruit/" + repo["name"] + "/contents/examples")
|
||||
return repo_has_ino.ok and len(repo_has_ino.json())
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
|
||||
#
|
||||
# Copyright (c) 2017 Scott Shawcroft for Adafruit Industries
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
""" Checks each library in the CircuitPython Bundles for updates.
|
||||
If updates are found the bundle is updated, updates are pushed to the
|
||||
|
|
@ -36,8 +18,9 @@ import redis as redis_py
|
|||
import sh
|
||||
from sh.contrib import git
|
||||
|
||||
from adabot import github_requests as github
|
||||
from adabot import github_requests as gh_reqs
|
||||
from adabot.lib import common_funcs
|
||||
from adabot import circuitpython_library_download_stats as dl_stats
|
||||
|
||||
REDIS = None
|
||||
if "GITHUB_WORKSPACE" in os.environ:
|
||||
|
|
@ -74,6 +57,88 @@ def fetch_bundle(bundle, bundle_path):
|
|||
os.chdir(working_directory)
|
||||
|
||||
|
||||
def update_download_stats(bundle_path):
|
||||
"""
|
||||
Updates the download stats for all the libraries
|
||||
"""
|
||||
if not "Adafruit_CircuitPython_Bundle" in bundle_path:
|
||||
return False
|
||||
|
||||
with open(os.path.join(bundle_path, "circuitpython_library_list.md")) as md_file:
|
||||
lib_list_full = md_file.read()
|
||||
|
||||
submodules_list = common_funcs.get_bundle_submodules()
|
||||
lib_list_header = [
|
||||
"# Adafruit CircuitPython Library Download Stats",
|
||||
(
|
||||
" "
|
||||
),
|
||||
"### Here is a listing of current Adafruit CircuitPython libraries download statistics.",
|
||||
f"**There are {len(submodules_list)} libraries available.**\n",
|
||||
"",
|
||||
]
|
||||
|
||||
submodules_stats = dl_stats.retrieve_pypi_stats(submodules_list)
|
||||
stats_lines = [
|
||||
"| Library (PyPI Package) | Downloads in the Last 7 Days |",
|
||||
"| --- | --- |",
|
||||
]
|
||||
total_downloads = 0
|
||||
blinka_downloads = 0
|
||||
for download_stat in submodules_stats:
|
||||
if download_stat.name == "adafruit-blinka":
|
||||
blinka_downloads = download_stat.num_downloads
|
||||
continue
|
||||
repo_name_comps = download_stat.name.split("-")
|
||||
searchable_repo_name = " ".join(repo_name_comps)
|
||||
try:
|
||||
start_index = lib_list_full.lower().index(searchable_repo_name)
|
||||
except ValueError:
|
||||
continue
|
||||
printable_repo_name = lib_list_full[
|
||||
start_index : start_index + len(searchable_repo_name)
|
||||
]
|
||||
stats_lines.append(
|
||||
f"| {printable_repo_name} ({download_stat.name}) | "
|
||||
f"{download_stat.num_downloads} downloads |"
|
||||
)
|
||||
total_downloads += download_stat.num_downloads
|
||||
|
||||
lib_list_header.append(
|
||||
f"**Total PyPI library downloads in the last 7 days: {total_downloads}** "
|
||||
)
|
||||
lib_list_header.append("")
|
||||
|
||||
with open(
|
||||
os.path.join(bundle_path, "circuitpython_library_pypi_stats.md"), "w"
|
||||
) as md_file:
|
||||
# Write headers
|
||||
md_file.write("\n".join(lib_list_header))
|
||||
md_file.write("\n")
|
||||
|
||||
# Write library stats table
|
||||
for line in stats_lines:
|
||||
md_file.write(line + "\n")
|
||||
|
||||
# Write Blinka intro text
|
||||
md_file.write("\n")
|
||||
md_file.write("## Blinka\n")
|
||||
md_file.write("\n")
|
||||
md_file.write(
|
||||
"Blinka is our CircuitPython compatibility layer for MicroPython\n"
|
||||
)
|
||||
md_file.write("and single board computers such as the Raspberry Pi.\n")
|
||||
|
||||
# Write Blinka stats table
|
||||
md_file.write("\n")
|
||||
md_file.write("| Blinka (PyPI Package) | Downloads in the Last 7 Days |\n")
|
||||
md_file.write("| --- | --- |\n")
|
||||
md_file.write(f"| Adafruit Blinka (adafruit-blinka) | {blinka_downloads} |\n")
|
||||
|
||||
return True
|
||||
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
def check_lib_links_md(bundle_path):
|
||||
"""Checks and updates the `circuitpython_library_list` Markdown document
|
||||
|
|
@ -84,6 +149,7 @@ def check_lib_links_md(bundle_path):
|
|||
submodules_list = sorted(
|
||||
common_funcs.get_bundle_submodules(), key=lambda module: module[1]["path"]
|
||||
)
|
||||
submodules_list = common_funcs.get_bundle_submodules()
|
||||
|
||||
lib_count = len(submodules_list)
|
||||
# used to generate commit message by comparing new libs to current list
|
||||
|
|
@ -125,10 +191,10 @@ def check_lib_links_md(bundle_path):
|
|||
lib_list_header = [
|
||||
"# Adafruit CircuitPython Libraries",
|
||||
(
|
||||
""
|
||||
" "
|
||||
),
|
||||
"Here is a listing of current Adafruit CircuitPython Libraries.",
|
||||
"Here is a listing of current Adafruit CircuitPython Libraries. ",
|
||||
f"There are {lib_count} libraries available.\n",
|
||||
"## Drivers:\n",
|
||||
]
|
||||
|
|
@ -207,7 +273,8 @@ def update_bundle(bundle_path):
|
|||
# They will contain a '-' in the tag, such as '3.0.0-beta.5'.
|
||||
# --exclude must be before --tags.
|
||||
# sh fails to find the subcommand so we use subprocess.
|
||||
subprocess.run(
|
||||
|
||||
subprocess.run( # pylint: disable=subprocess-run-check
|
||||
shlex.split(
|
||||
"git submodule foreach 'git checkout -q "
|
||||
"`git rev-list --exclude='*-*' --tags --max-count=1`'"
|
||||
|
|
@ -217,6 +284,7 @@ def update_bundle(bundle_path):
|
|||
status = StringIO()
|
||||
git.status("--short", _out=status)
|
||||
updates = []
|
||||
release_required = False
|
||||
status = status.getvalue().strip()
|
||||
if status:
|
||||
for status_line in status.split("\n"):
|
||||
|
|
@ -237,6 +305,7 @@ def update_bundle(bundle_path):
|
|||
url = repo_remote_url(directory)
|
||||
summary = "\n".join(diff_lines[1:-1])
|
||||
updates.append((url[:-4], old_commit, new_commit, summary))
|
||||
release_required = True
|
||||
os.chdir(working_directory)
|
||||
lib_list_updates = check_lib_links_md(bundle_path)
|
||||
if lib_list_updates:
|
||||
|
|
@ -253,8 +322,21 @@ def update_bundle(bundle_path):
|
|||
),
|
||||
)
|
||||
)
|
||||
release_required = True
|
||||
if update_download_stats(bundle_path):
|
||||
updates.append(
|
||||
(
|
||||
(
|
||||
"https://github.com/adafruit/Adafruit_CircuitPython_Bundle/"
|
||||
"circuitpython_library_list.md"
|
||||
),
|
||||
"NA",
|
||||
"NA",
|
||||
" > Updated download stats for the libraries",
|
||||
)
|
||||
)
|
||||
|
||||
return updates
|
||||
return updates, release_required
|
||||
|
||||
|
||||
def commit_updates(bundle_path, update_info):
|
||||
|
|
@ -301,7 +383,7 @@ def get_contributors(repo, commit_range):
|
|||
author = REDIS.get("github_username:" + author_email)
|
||||
committer = REDIS.get("github_username:" + committer_email)
|
||||
if not author or not committer:
|
||||
github_commit_info = github.get("/repos/" + repo + "/commits/" + sha)
|
||||
github_commit_info = gh_reqs.get("/repos/" + repo + "/commits/" + sha)
|
||||
github_commit_info = github_commit_info.json()
|
||||
if github_commit_info["author"]:
|
||||
author = github_commit_info["author"]["login"]
|
||||
|
|
@ -349,7 +431,7 @@ def new_release(bundle, bundle_path):
|
|||
working_directory = os.path.abspath(os.getcwd())
|
||||
os.chdir(bundle_path)
|
||||
print(bundle)
|
||||
current_release = github.get("/repos/adafruit/{}/releases/latest".format(bundle))
|
||||
current_release = gh_reqs.get("/repos/adafruit/{}/releases/latest".format(bundle))
|
||||
last_tag = current_release.json()["tag_name"]
|
||||
contributors = get_contributors("adafruit/" + bundle, last_tag + "..")
|
||||
added_submodules = []
|
||||
|
|
@ -424,7 +506,7 @@ def new_release(bundle, bundle_path):
|
|||
release_description.append(
|
||||
"The libraries in each release are compiled for all recent major versions of CircuitPython."
|
||||
" Please download the one that matches the major version of your CircuitPython. For example"
|
||||
", if you are running 6.0.0 you should download the `6.x` bundle.\n"
|
||||
", if you are running 8.2.6 you should download the `8.x` bundle.\n"
|
||||
)
|
||||
|
||||
release_description.append(
|
||||
|
|
@ -447,7 +529,7 @@ def new_release(bundle, bundle_path):
|
|||
|
||||
print("Releasing {}".format(release["tag_name"]))
|
||||
print(release["body"])
|
||||
response = github.post("/repos/adafruit/" + bundle + "/releases", json=release)
|
||||
response = gh_reqs.post("/repos/adafruit/" + bundle + "/releases", json=release)
|
||||
if not response.ok:
|
||||
print("Failed to create release")
|
||||
print(release)
|
||||
|
|
@ -466,9 +548,9 @@ if __name__ == "__main__":
|
|||
bundle_dir = os.path.join(bundles_dir, cp_bundle)
|
||||
try:
|
||||
fetch_bundle(cp_bundle, bundle_dir)
|
||||
updates_needed = update_bundle(bundle_dir)
|
||||
if updates_needed:
|
||||
commit_updates(bundle_dir, updates_needed)
|
||||
updates, release_required = update_bundle(bundle_dir)
|
||||
if release_required:
|
||||
commit_updates(bundle_dir, updates)
|
||||
push_updates(bundle_dir)
|
||||
new_release(cp_bundle, bundle_dir)
|
||||
except RuntimeError as e:
|
||||
|
|
|
|||
|
|
@ -1,25 +1,7 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
|
||||
# SPDX-FileCopyrightText: 2019 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2017 Scott Shawcroft for Adafruit Industries
|
||||
# 2019 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Adabot utility for CircuitPython Libraries."""
|
||||
|
||||
|
|
@ -31,14 +13,17 @@ import re
|
|||
import sys
|
||||
import traceback
|
||||
|
||||
from adabot import github_requests as github
|
||||
from adabot import github_requests as gh_reqs
|
||||
from adabot import pypi_requests as pypi
|
||||
from adabot.lib import circuitpython_library_validators as cirpy_lib_vals
|
||||
from adabot.lib import common_funcs
|
||||
from adabot.lib import assign_hacktober_label as hacktober
|
||||
from adabot.lib import blinka_funcs
|
||||
from adabot.lib import bundle_announcer
|
||||
from adabot import circuitpython_library_download_stats as dl_stats
|
||||
|
||||
from .gh_interface import GH_INTERFACE
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
ch = logging.StreamHandler(stream=sys.stdout)
|
||||
logging.basicConfig(level=logging.INFO, format="%(message)s", handlers=[ch])
|
||||
|
|
@ -104,6 +89,7 @@ blinka_repos = [
|
|||
"Adafruit_Python_Extended_Bus",
|
||||
]
|
||||
|
||||
|
||||
# pylint: disable=too-many-locals, too-many-branches, too-many-statements
|
||||
def run_library_checks(validators, kw_args, error_depth):
|
||||
"""runs the various library checking functions"""
|
||||
|
|
@ -113,7 +99,7 @@ def run_library_checks(validators, kw_args, error_depth):
|
|||
pylint_info = pypi.get("/pypi/pylint/json")
|
||||
if pylint_info and pylint_info.ok:
|
||||
latest_pylint = pylint_info.json()["info"]["version"]
|
||||
logger.info("Latest pylint is: %s", latest_pylint)
|
||||
# logger.info("Latest pylint is: %s", latest_pylint)
|
||||
|
||||
repos = common_funcs.list_repos(
|
||||
include_repos=tuple(blinka_repos)
|
||||
|
|
@ -135,8 +121,6 @@ def run_library_checks(validators, kw_args, error_depth):
|
|||
repo_needs_work = []
|
||||
since = datetime.datetime.now() - datetime.timedelta(days=7)
|
||||
repos_by_error = {}
|
||||
new_libs = {}
|
||||
updated_libs = {}
|
||||
|
||||
validator = cirpy_lib_vals.LibraryValidator(
|
||||
validators, bundle_submodules, latest_pylint, **kw_args
|
||||
|
|
@ -183,17 +167,16 @@ def run_library_checks(validators, kw_args, error_depth):
|
|||
logger.info(", ".join(validator.output_file_data))
|
||||
validator.output_file_data.clear()
|
||||
|
||||
# get a list of new & updated libraries for the last week
|
||||
if repo["name"] != "Adafruit_CircuitPython_Bundle":
|
||||
check_releases = common_funcs.is_new_or_updated(repo)
|
||||
if check_releases == "new":
|
||||
new_libs[repo["name"]] = repo["html_url"]
|
||||
elif check_releases == "updated":
|
||||
updated_libs[repo["name"]] = repo["html_url"]
|
||||
|
||||
logger.info("")
|
||||
logger.info("State of CircuitPython + Libraries + Blinka")
|
||||
|
||||
logger.info("")
|
||||
logger.info("**This report contains information from the previous seven days.**")
|
||||
logger.info(
|
||||
"**Any changes (PRs merged, etc.) made today are not included in this report.**"
|
||||
)
|
||||
logger.info("")
|
||||
|
||||
logger.info("### Overall")
|
||||
print_pr_overview(lib_insights, core_insights, blinka_insights)
|
||||
print_issue_overview(lib_insights, core_insights, blinka_insights)
|
||||
|
|
@ -213,15 +196,17 @@ def run_library_checks(validators, kw_args, error_depth):
|
|||
logger.info("* %s open issues", len(core_insights["open_issues"]))
|
||||
logger.info(" * https://github.com/adafruit/circuitpython/issues")
|
||||
logger.info("* %s active milestones", len(core_insights["milestones"]))
|
||||
ms_count = 0
|
||||
for milestone in sorted(core_insights["milestones"].keys()):
|
||||
ms_count += core_insights["milestones"][milestone]
|
||||
logger.info(
|
||||
" * %s: %s open issues", milestone, core_insights["milestones"][milestone]
|
||||
)
|
||||
for milestone, milestone_issue_count in sorted(core_insights["milestones"].items()):
|
||||
logger.info(" * %s: %s open issues", milestone, milestone_issue_count)
|
||||
no_milestone_items = gh_reqs.get(
|
||||
"/repos/adafruit/circuitpython/issues?milestone=none"
|
||||
).json()
|
||||
no_milestone_issues = [
|
||||
item for item in no_milestone_items if "pull_request" not in item
|
||||
]
|
||||
logger.info(
|
||||
" * %s issues not assigned a milestone",
|
||||
len(core_insights["open_issues"]) - ms_count,
|
||||
" * %s issues not assigned a milestone",
|
||||
len(no_milestone_issues),
|
||||
)
|
||||
logger.info("")
|
||||
|
||||
|
|
@ -229,9 +214,38 @@ def run_library_checks(validators, kw_args, error_depth):
|
|||
# - GitHub API has been broken, due to the number of release artifacts
|
||||
# - Release asset delivery is being moved to AWS CloudFront/S3
|
||||
# print_circuitpython_dl_stats()
|
||||
logger.info("* Core download stats available at https://circuitpython.org/stats")
|
||||
|
||||
logger.info("")
|
||||
# Get PyPI stats
|
||||
have_secrets = False
|
||||
|
||||
ada_bundle = GH_INTERFACE.get_repo("adafruit/Adafruit_CircuitPython_Bundle")
|
||||
file_contents = ada_bundle.get_contents("circuitpython_library_pypi_stats.md")
|
||||
stats_contents = file_contents.decoded_content.decode("utf-8").split("\n")
|
||||
lib_stats = {}
|
||||
total_library_pypi_stats = 0
|
||||
blinka_pypi_downloads = 0
|
||||
in_lib_stats = False
|
||||
for line in stats_contents:
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
if line.startswith("| Adafruit Blinka (adafruit-blinka) |"):
|
||||
blinka_pypi_downloads = int(line[38:-2])
|
||||
continue
|
||||
if line.startswith("**Total PyPI library downloads in the last 7 days:"):
|
||||
total_library_pypi_stats = int(line[51:-2])
|
||||
continue
|
||||
if line.startswith("|"):
|
||||
parts = [part.strip() for part in line.split("|") if part.strip()]
|
||||
if parts[0] in ("Library (PyPI Package)", "---"):
|
||||
in_lib_stats = True
|
||||
continue
|
||||
if in_lib_stats:
|
||||
lib_stats[parts[0]] = int(parts[1][:-10])
|
||||
else:
|
||||
in_lib_stats = False
|
||||
have_secrets = True
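Since the slice offsets above are easy to misread, here is a quick illustration of the fixed-width parsing applied to lines in the exact format that update_download_stats() writes into circuitpython_library_pypi_stats.md (the numbers are made up):

blinka_line = "| Adafruit Blinka (adafruit-blinka) | 4321 |"
total_line = "**Total PyPI library downloads in the last 7 days: 98765** "
print(int(blinka_line[38:-2]))         # 4321 -- the count between the last two pipes
print(int(total_line.strip()[51:-2]))  # 98765 -- the count before the closing "**"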
|
||||
|
||||
logger.info("### Libraries")
|
||||
print_pr_overview(lib_insights)
|
||||
logger.info(" * Merged pull requests:")
|
||||
|
|
@ -263,15 +277,49 @@ def run_library_checks(validators, kw_args, error_depth):
|
|||
|
||||
logger.info("* https://circuitpython.org/contributing")
|
||||
|
||||
logger.info("Library updates in the last seven days:")
|
||||
if len(new_libs) != 0:
|
||||
logger.info("**New Libraries**")
|
||||
for title, link in new_libs.items():
|
||||
logger.info(" * [%s](%s)", title, link)
|
||||
if len(updated_libs) != 0:
|
||||
logger.info("**Updated Libraries**")
|
||||
for title, link in updated_libs.items():
|
||||
logger.info(" * [%s](%s)", title, link)
|
||||
logger.info("")
|
||||
logger.info("#### Library PyPI Weekly Download Stats")
|
||||
# if pypi_stats:
|
||||
if have_secrets:
|
||||
logger.info("* **Total Library Stats**")
|
||||
logger.info(
|
||||
" * %d PyPI downloads over %d libraries",
|
||||
total_library_pypi_stats,
|
||||
len(lib_stats),
|
||||
)
|
||||
logger.info("* **Top 10 Libraries by PyPI Downloads**")
|
||||
for index, (lib_stat_name, lib_stat_dl) in enumerate(lib_stats.items()):
|
||||
if index == 10:
|
||||
break
|
||||
logger.info(
|
||||
" * %s: %d",
|
||||
lib_stat_name,
|
||||
lib_stat_dl,
|
||||
)
|
||||
else:
|
||||
logger.info(
|
||||
"Secrets unavailable, cannot report PyPI download stats for libraries"
|
||||
)
|
||||
logger.info("*This is normal for CI runs from PRs*")
|
||||
|
||||
new_libs, updated_libs = bundle_announcer.get_adafruit_bundle_updates()
|
||||
(
|
||||
new_community_libs,
|
||||
updated_community_libs,
|
||||
) = bundle_announcer.get_community_bundle_updates()
|
||||
|
||||
logger.info("")
|
||||
logger.info("#### Library updates in the last seven days:")
|
||||
if new_libs or new_community_libs:
|
||||
logger.info("* **New Libraries**")
|
||||
for new_lib_category in (new_libs, new_community_libs):
|
||||
for title, link in new_lib_category:
|
||||
logger.info(" * [%s](%s)", title, link)
|
||||
if updated_libs or updated_community_libs:
|
||||
logger.info("* **Updated Libraries**")
|
||||
for updated_lib_category in (updated_libs, updated_community_libs):
|
||||
for title, link in updated_lib_category:
|
||||
logger.info(" * [%s](%s)", title, link)
|
||||
|
||||
if len(validators) != 0:
|
||||
lib_repos = []
|
||||
|
|
@ -310,7 +358,19 @@ def run_library_checks(validators, kw_args, error_depth):
|
|||
print_issue_overview(blinka_insights)
|
||||
logger.info("* %s open issues", len(blinka_insights["open_issues"]))
|
||||
logger.info(" * https://github.com/adafruit/Adafruit_Blinka/issues")
|
||||
blinka_dl = dl_stats.piwheels_stats().get("adafruit-blinka", {}).get("month", "N/A")
|
||||
blinka_dl = (
|
||||
dl_stats.retrieve_piwheels_stats()
|
||||
.get("adafruit-blinka", {})
|
||||
.get("month", "N/A")
|
||||
)
|
||||
if have_secrets:
|
||||
logger.info(
|
||||
"* PyPI downloads in the last week: %d",
|
||||
blinka_pypi_downloads,
|
||||
)
|
||||
else:
|
||||
logger.info("Secrets unavailable, cannot report PyPI download stats for Blinka")
|
||||
logger.info("*This is normal for CI runs from PRs*")
|
||||
logger.info("* Piwheels Downloads in the last month: %s", blinka_dl)
|
||||
logger.info("Number of supported boards: %s", blinka_funcs.board_count())
|
||||
|
||||
|
|
@ -325,7 +385,7 @@ def print_circuitpython_dl_stats():
|
|||
# enable this.
|
||||
|
||||
try:
|
||||
response = github.get("/repos/adafruit/circuitpython/releases")
|
||||
response = gh_reqs.get("/repos/adafruit/circuitpython/releases")
|
||||
except (ValueError, RuntimeError):
|
||||
logger.info("Core CircuitPython GitHub download statistics request failed.")
|
||||
return
|
||||
|
|
|
|||
|
|
@ -1,29 +1,12 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2018 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2018 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
""" Collects download stats for the Adafruit CircuitPython Library Bundles
|
||||
and each library.
|
||||
"""
|
||||
|
||||
import os
|
||||
import datetime
|
||||
import sys
|
||||
import argparse
|
||||
|
|
@ -31,7 +14,10 @@ import traceback
|
|||
import operator
|
||||
import requests
|
||||
|
||||
from adabot import github_requests as github
|
||||
from google.cloud import bigquery
|
||||
import google.oauth2.service_account
|
||||
|
||||
from adabot import github_requests as gh_reqs
|
||||
from adabot.lib import common_funcs
|
||||
|
||||
# Setup ArgumentParser
|
||||
|
|
@ -71,7 +57,7 @@ PYPI_FORCE_NON_CIRCUITPYTHON = ["Adafruit-Blinka"]
|
|||
PIWHEELS_PACKAGES_URL = "https://www.piwheels.org/packages.json"
|
||||
|
||||
|
||||
def piwheels_stats():
|
||||
def retrieve_piwheels_stats():
|
||||
"""Get data dump of piwheels download stats"""
|
||||
stats = {}
|
||||
response = requests.get(PIWHEELS_PACKAGES_URL)
|
||||
|
|
@ -86,12 +72,12 @@ def piwheels_stats():
|
|||
return stats
|
||||
|
||||
|
||||
def get_pypi_stats():
|
||||
def parse_piwheels_stats():
|
||||
"""Map piwheels download stats for each repo"""
|
||||
successful_stats = {}
|
||||
failed_stats = []
|
||||
repos = common_funcs.list_repos()
|
||||
dl_stats = piwheels_stats()
|
||||
dl_stats = retrieve_piwheels_stats()
|
||||
for repo in repos:
|
||||
if repo["owner"]["login"] == "adafruit" and repo["name"].startswith(
|
||||
"Adafruit_CircuitPython"
|
||||
|
|
@ -119,13 +105,68 @@ def get_pypi_stats():
|
|||
return successful_stats, failed_stats
|
||||
|
||||
|
||||
def retrieve_pypi_stats(submodules, additional_packages=("adafruit-blinka",)):
|
||||
"""Get data dump of PyPI download stats (for the last 7 days)"""
|
||||
# Create access info dictionary
|
||||
access_info = {
|
||||
"private_key": os.environ["BIGQUERY_PRIVATE_KEY"],
|
||||
"client_email": os.environ["BIGQUERY_CLIENT_EMAIL"],
|
||||
"token_uri": "https://oauth2.googleapis.com/token",
|
||||
}
|
||||
|
||||
# Use credentials to create a BigQuery client object
|
||||
credentials = google.oauth2.service_account.Credentials.from_service_account_info(
|
||||
access_info
|
||||
)
|
||||
client = bigquery.Client("circuitpython-stats", credentials=credentials)
|
||||
|
||||
# Get the list of PyPI package names
|
||||
packages = []
|
||||
for submod in submodules:
|
||||
url: str = submod[1]["url"]
|
||||
pre_name = url.split("/")[-1][:-4]
|
||||
packages.append(pre_name.replace("_", "-").lower())
|
||||
for addpack in additional_packages:
|
||||
packages.append(addpack)
|
||||
|
||||
# Construct the query to use
|
||||
query = """
|
||||
SELECT
|
||||
file.project as name, COUNT(*) AS num_downloads,
|
||||
FROM
|
||||
`bigquery-public-data.pypi.file_downloads`
|
||||
WHERE DATE(timestamp)
|
||||
BETWEEN DATE_TRUNC(DATE_SUB(CURRENT_DATE(), INTERVAL 7 DAY), DAY)
|
||||
AND DATE_TRUNC(DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY), DAY)
|
||||
AND file.project in (
|
||||
"""
|
||||
packages_query = ["?" for _ in packages]
|
||||
query_parameters = [
|
||||
bigquery.ScalarQueryParameter(None, "STRING", package) for package in packages
|
||||
]
|
||||
query += ",".join(packages_query)
|
||||
query += """
|
||||
)
|
||||
GROUP BY file.project
|
||||
ORDER BY num_downloads DESC
|
||||
"""
|
||||
|
||||
# Configure and run the query
|
||||
job_config = bigquery.QueryJobConfig(query_parameters=query_parameters)
|
||||
query_job = client.query(
|
||||
query,
|
||||
job_config=job_config,
|
||||
)
|
||||
return query_job.result()
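A rough usage sketch of the function above (it assumes the BIGQUERY_PRIVATE_KEY and BIGQUERY_CLIENT_EMAIL environment variables are set, exactly as retrieve_pypi_stats() requires); each result row exposes the aliased name and num_downloads columns that update_download_stats() consumes:

from adabot.lib import common_funcs
from adabot import circuitpython_library_download_stats as dl_stats

submodules = common_funcs.get_bundle_submodules()
for row in dl_stats.retrieve_pypi_stats(submodules):
    # Each row carries the PyPI package name and its 7-day download count.
    print(row.name, row.num_downloads)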
|
||||
|
||||
|
||||
def get_bundle_stats(bundle):
|
||||
"""Returns the download stats for 'bundle'. Uses release tag names to compile download
|
||||
stats for the last 7 days. This assumes an Adabot release within that time frame, and
|
||||
that tag name(s) will be the date (YYYYMMDD).
|
||||
"""
|
||||
stats_dict = {}
|
||||
bundle_stats = github.get("/repos/adafruit/" + bundle + "/releases")
|
||||
bundle_stats = gh_reqs.get("/repos/adafruit/" + bundle + "/releases")
|
||||
if not bundle_stats.ok:
|
||||
return {"Failed to retrieve bundle stats": bundle_stats.text}
|
||||
start_date = datetime.date.today()
|
||||
|
|
@ -190,7 +231,7 @@ def run_stat_check():
|
|||
]
|
||||
output_handler("Adafruit CircuitPython Library Piwheels downloads:")
|
||||
output_handler()
|
||||
pypi_downloads, pypi_failures = get_pypi_stats()
|
||||
pypi_downloads, pypi_failures = parse_piwheels_stats()
|
||||
for stat in sorted(
|
||||
pypi_downloads.items(), key=operator.itemgetter(1, 1), reverse=True
|
||||
):
|
||||
|
|
|
|||
|
|
@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2019 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2019 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Adabot utility for applying patches to all CircuitPython Libraries."""
|
||||
|
||||
|
|
|
|||
adabot/gh_interface.py (new file, +12)
@@ -0,0 +1,12 @@
# SPDX-FileCopyrightText: 2023 Jeff Epler
#
# SPDX-License-Identifier: MIT
"""Get a properly-configured Github() object"""
import os

import github

GH_TOKEN = os.environ.get("ADABOT_GITHUB_ACCESS_TOKEN")
GH_INTERFACE = github.Github(
    auth=github.Auth.Token(GH_TOKEN) if GH_TOKEN else None, retry=github.GithubRetry()
)
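A minimal sketch of how the shared client is consumed elsewhere in this branch (the repository name is only an example; without ADABOT_GITHUB_ACCESS_TOKEN set, calls fall back to unauthenticated rate limits):

from adabot.gh_interface import GH_INTERFACE

repo = GH_INTERFACE.get_repo("adafruit/Adafruit_CircuitPython_Bundle")
print(repo.full_name, repo.default_branch)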
|
||||
|
|
@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
|
||||
#
|
||||
# Copyright (c) 2017 Scott Shawcroft for Adafruit Industries
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Wrapper for GitHub requests."""
|
||||
|
||||
|
|
@ -100,6 +82,9 @@ def request(method, url, **kwargs):
|
|||
"See log for error text that has been sanitized for secrets"
|
||||
) from None
|
||||
|
||||
if not from_cache:
|
||||
if remaining % 100 == 0 or remaining < 20:
|
||||
logging.info("%d requests remaining this hour", remaining)
|
||||
if not from_cache and remaining <= 1:
|
||||
rate_limit_reset = datetime.datetime.fromtimestamp(
|
||||
int(response.headers["X-RateLimit-Reset"])
|
||||
|
|
@ -107,6 +92,11 @@ def request(method, url, **kwargs):
|
|||
logging.warning(
|
||||
"GitHub API Rate Limit reached. Pausing until Rate Limit reset."
|
||||
)
|
||||
# This datetime.now() is correct, *because* `fromtimestamp` above
|
||||
# converts the timestamp into local time, same as now(). This is
|
||||
# different than the sites that use GH_INTERFACE.get_rate_limit, in
|
||||
# which the rate limit is a UTC time, so it has to be compared to
|
||||
# utcnow.
|
||||
while datetime.datetime.now() < rate_limit_reset:
|
||||
logging.warning("Rate Limit will reset at: %s", rate_limit_reset)
|
||||
reset_diff = rate_limit_reset - datetime.datetime.now()
|
||||
|
|
@ -114,9 +104,6 @@ def request(method, url, **kwargs):
|
|||
logging.info("Sleeping %s seconds", reset_diff.seconds)
|
||||
time.sleep(reset_diff.seconds + 1)
|
||||
|
||||
if remaining % 100 == 0:
|
||||
logging.info(remaining, "requests remaining this hour")
|
||||
|
||||
return response
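To make the comment above concrete, a small self-contained sketch of the pause calculation (the header value is fabricated): X-RateLimit-Reset is a Unix timestamp, fromtimestamp() converts it to local time, so it is compared against datetime.now() rather than utcnow().

import datetime
import time

reset_header = "1700003600"  # fabricated X-RateLimit-Reset value, seconds since the epoch
rate_limit_reset = datetime.datetime.fromtimestamp(int(reset_header))  # local time
while datetime.datetime.now() < rate_limit_reset:
    reset_diff = rate_limit_reset - datetime.datetime.now()
    time.sleep(reset_diff.seconds + 1)  # sleep past the reset before retrying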
|
||||
|
||||
|
||||
|
|
|
|||
adabot/lib/__init__.py (new, empty file)

@@ -1,24 +1,6 @@
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2019 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2019 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""An utility to automatically apply the 'hacktoberfest' label to open issues
|
||||
marked as 'good first issue', during DigitalOcean's/GitHub's Hacktoberfest
|
||||
|
|
@ -29,7 +11,7 @@ import argparse
|
|||
import datetime
|
||||
import requests
|
||||
|
||||
from adabot import github_requests as github
|
||||
from adabot import github_requests as gh_reqs
|
||||
from adabot.lib import common_funcs
|
||||
|
||||
cli_args = argparse.ArgumentParser(description="Hacktoberfest Label Assigner")
|
||||
|
|
@ -79,7 +61,7 @@ def get_open_issues(repo):
|
|||
params = {
|
||||
"state": "open",
|
||||
}
|
||||
response = github.get("/repos/" + repo["full_name"] + "/issues", params=params)
|
||||
response = gh_reqs.get("/repos/" + repo["full_name"] + "/issues", params=params)
|
||||
if not response.ok:
|
||||
print(f"Failed to retrieve issues for '{repo['name']}'")
|
||||
return False
|
||||
|
|
@ -102,7 +84,7 @@ def ensure_hacktober_label_exists(repo, dry_run=False):
|
|||
"""Checks if the 'Hacktoberfest' label exists on the repo.
|
||||
If not, creates the label.
|
||||
"""
|
||||
response = github.get(f"/repos/{repo['full_name']}/labels")
|
||||
response = gh_reqs.get(f"/repos/{repo['full_name']}/labels")
|
||||
if not response.ok:
|
||||
print(f"Failed to retrieve labels for '{repo['name']}'")
|
||||
return False
|
||||
|
|
@ -117,7 +99,7 @@ def ensure_hacktober_label_exists(repo, dry_run=False):
|
|||
"description": "DigitalOcean's Hacktoberfest",
|
||||
}
|
||||
if not dry_run:
|
||||
result = github.post(f"/repos/{repo['full_name']}/labels", json=params)
|
||||
result = gh_reqs.post(f"/repos/{repo['full_name']}/labels", json=params)
|
||||
if not result.status_code == 201:
|
||||
print(f"Failed to create new Hacktoberfest label for: {repo['name']}")
|
||||
return False
|
||||
|
|
@ -154,9 +136,10 @@ def assign_hacktoberfest(repo, issues=None, remove_labels=False, dry_run=False):
|
|||
update_issue = True
|
||||
|
||||
if update_issue:
|
||||
label_names.append("Hacktoberfest")
|
||||
params = {"labels": label_names}
|
||||
if not dry_run:
|
||||
result = github.patch(
|
||||
result = gh_reqs.patch(
|
||||
f"/repos/{repo['full_name']}/issues/{str(issue['number'])}",
|
||||
json=params,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,28 +1,10 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2019 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2019 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Common functions used with Adabot & Blinka interactions."""
|
||||
|
||||
from adabot import github_requests as github
|
||||
from adabot import github_requests as gh_reqs
|
||||
|
||||
|
||||
def board_count():
|
||||
|
|
@ -31,7 +13,7 @@ def board_count():
|
|||
"""
|
||||
count = 0
|
||||
cirpy_org_url = "/repos/adafruit/circuitpython-org/contents/_blinka"
|
||||
response = github.get(cirpy_org_url)
|
||||
response = gh_reqs.get(cirpy_org_url)
|
||||
if response.ok:
|
||||
response_json = response.json()
|
||||
count = len(response_json)
|
||||
|
|
|
|||
adabot/lib/bundle_announcer.py (new file, +94)
@@ -0,0 +1,94 @@
# SPDX-FileCopyrightText: 2022 Alec Delaney, for Adafruit Industries
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Checks for the latest releases in the Community bundle based
|
||||
on the automated release.
|
||||
|
||||
* Author(s): Alec Delaney
|
||||
"""
|
||||
|
||||
import datetime
|
||||
from typing import Tuple, Set
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
import github as pygithub
|
||||
import parse
|
||||
|
||||
from ..gh_interface import GH_INTERFACE
|
||||
|
||||
RepoResult: TypeAlias = Tuple[str, str]
|
||||
"""(Submodule Name, Full Repo Name)"""
|
||||
|
||||
|
||||
def get_community_bundle_updates() -> Tuple[Set[RepoResult], Set[RepoResult]]:
|
||||
"""Get new and updated libraries in the Community Bundle"""
|
||||
return get_bundle_updates("adafruit/CircuitPython_Community_Bundle")
|
||||
|
||||
|
||||
def get_adafruit_bundle_updates() -> Tuple[Set[RepoResult], Set[RepoResult]]:
|
||||
"""Get new and updated libraries in the Adafruit Bundle"""
|
||||
return get_bundle_updates("adafruit/Adafruit_CircuitPython_Bundle")
|
||||
|
||||
|
||||
# pylint: disable=too-many-locals
|
||||
def get_bundle_updates(full_repo_name: str) -> Tuple[Set[RepoResult], Set[RepoResult]]:
|
||||
"""
|
||||
Get the updates to the Community Bundle.
|
||||
|
||||
Returns new and updated libraries
|
||||
"""
|
||||
try:
|
||||
repository = GH_INTERFACE.get_repo(full_repo_name)
|
||||
seven_days_ago = datetime.datetime.now() - datetime.timedelta(days=7)
|
||||
recent_releases = [
|
||||
release
|
||||
for release in repository.get_releases()
|
||||
if release.created_at > seven_days_ago
|
||||
]
|
||||
new_libs = set()
|
||||
updated_libs = set()
|
||||
for recent_release in recent_releases:
|
||||
relevant_lines = [
|
||||
line
|
||||
for line in recent_release.body.split("\n")
|
||||
if line.startswith("Updated libraries")
|
||||
or line.startswith("New libraries:")
|
||||
]
|
||||
for relevant_line in relevant_lines:
|
||||
lib_components = [x.strip(",") for x in relevant_line.split(" ")[2:]]
|
||||
for lib in lib_components:
|
||||
comps = parse.parse("[{name:S}]({link_comp:S})", lib)
|
||||
link: str = parse.search("{link:S}/releases", comps["link_comp"])[
|
||||
"link"
|
||||
]
|
||||
full_name = parse.search("https://github.com/{full_name:S}", link)[
|
||||
"full_name"
|
||||
]
|
||||
if relevant_line.startswith("Updated libraries"):
|
||||
updated_libs.add((full_name, link))
|
||||
else:
|
||||
new_libs.add((full_name, link))
|
||||
return (new_libs, updated_libs)
|
||||
except pygithub.GithubException:
|
||||
# Secrets may not be available or error occurred - just skip
|
||||
return (set(), set())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
adafruit_results = get_adafruit_bundle_updates()
|
||||
community_results = get_community_bundle_updates()
|
||||
for new_adafruit_lib in adafruit_results[0]:
|
||||
print(f"New libraries: {new_adafruit_lib[0]} { {new_adafruit_lib[1]} }")
|
||||
for updated_adafruit_lib in adafruit_results[1]:
|
||||
print(
|
||||
f"Updated libraries: {updated_adafruit_lib[0]} { {updated_adafruit_lib[1]} }"
|
||||
)
|
||||
print("-----")
|
||||
for new_community_lib in community_results[0]:
|
||||
print(f"New libraries: {new_community_lib[0]} { {new_community_lib[1]} }")
|
||||
for updated_community_lib in community_results[1]:
|
||||
print(
|
||||
f"Updated libraries: {updated_community_lib[0]} { {updated_community_lib[1]} }"
|
||||
)
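For clarity, here is roughly how the parse calls in get_bundle_updates() decompose one Markdown link taken from a release-notes line (the library name and URL below are fabricated):

import parse

lib = "[Example_Lib](https://github.com/adafruit/Example_Lib/releases/tag/1.0.0)"
comps = parse.parse("[{name:S}]({link_comp:S})", lib)
link = parse.search("{link:S}/releases", comps["link_comp"])["link"]
full_name = parse.search("https://github.com/{full_name:S}", link)["full_name"]
print(full_name)  # adafruit/Example_Lib
print(link)       # https://github.com/adafruit/Example_Lib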
|
||||
|
|
@ -1,65 +1,30 @@
|
|||
# pylint: disable=no-self-use
|
||||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
|
||||
#
|
||||
# Copyright (c) 2017 Scott Shawcroft for Adafruit Industries
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# pylint: disable=no-self-use,too-many-lines
|
||||
|
||||
"""Collection of validator methods to maintain a standard, as well as detect
|
||||
errors, across the entire CircuitPython library ecosystem."""
|
||||
|
||||
import datetime
|
||||
from io import StringIO
|
||||
import json
|
||||
import logging
|
||||
import pathlib
|
||||
import os
|
||||
import re
|
||||
from tempfile import TemporaryDirectory
|
||||
import time
|
||||
|
||||
from packaging.version import parse as pkg_version_parse
|
||||
from packaging.requirements import Requirement, InvalidRequirement
|
||||
|
||||
from pylint import lint
|
||||
from pylint.reporters import JSONReporter
|
||||
|
||||
import requests
|
||||
|
||||
import sh
|
||||
from sh.contrib import git
|
||||
|
||||
import yaml
|
||||
import parse
|
||||
|
||||
from adabot import github_requests as github
|
||||
import github as pygithub
|
||||
from adabot import github_requests as gh_reqs
|
||||
from adabot.lib import common_funcs
|
||||
from adabot.lib import assign_hacktober_label as hacktober
|
||||
|
||||
|
||||
class CapturedJsonReporter(JSONReporter):
|
||||
"""Helper class to stringify PyLint JSON reports."""
|
||||
|
||||
def __init__(self):
|
||||
self._stringio = StringIO()
|
||||
super().__init__(self._stringio)
|
||||
|
||||
def get_result(self):
|
||||
"""The current value."""
|
||||
return self._stringio.getvalue()
|
||||
from ..gh_interface import GH_INTERFACE
|
||||
|
||||
|
||||
# Define constants for error strings to make checking against them more robust:
|
||||
|
|
@ -73,6 +38,7 @@ ERROR_README_MISSING_CI_ACTIONS_BADGE = (
|
|||
"README CI badge needs to be changed to GitHub Actions"
|
||||
)
|
||||
ERROR_PYFILE_DOWNLOAD_FAILED = "Failed to download .py code file"
|
||||
ERROR_TOMLFILE_DOWNLOAD_FAILED = "Failed to download .toml file"
|
||||
ERROR_PYFILE_MISSING_STRUCT = (
|
||||
".py file contains reference to import ustruct"
|
||||
" without reference to import struct. See issue "
|
||||
|
|
@ -93,7 +59,8 @@ ERROR_PYFILE_MISSING_ERRNO = (
|
|||
" without reference to import errno. See issue "
|
||||
"https://github.com/adafruit/circuitpython/issues/1582"
|
||||
)
|
||||
ERROR_MISMATCHED_READTHEDOCS = "Mismatched readthedocs.yml"
|
||||
ERROR_MISMATCHED_READTHEDOCS = "Mismatched readthedocs.yaml"
|
||||
ERROR_MISMATCHED_PRE_COMMIT_CONFIG = "Mismatched versions in .pre-commit-config.yaml"
|
||||
ERROR_MISSING_DESCRIPTION = "Missing repository description"
|
||||
ERROR_MISSING_EXAMPLE_FILES = "Missing .py files in examples folder"
|
||||
ERROR_MISSING_EXAMPLE_FOLDER = "Missing examples folder"
|
||||
|
|
@ -110,9 +77,14 @@ ERROR_MISSING_LICENSE = "Missing license."
|
|||
ERROR_MISSING_LINT = "Missing lint config"
|
||||
ERROR_MISSING_CODE_OF_CONDUCT = "Missing CODE_OF_CONDUCT.md"
|
||||
ERROR_MISSING_README_RST = "Missing README.rst"
|
||||
ERROR_MISSING_READTHEDOCS = "Missing readthedocs.yml"
|
||||
ERROR_MISSING_SETUP_PY = "For pypi compatibility, missing setup.py"
|
||||
ERROR_MISSING_REQUIREMENTS_TXT = "For pypi compatibility, missing requirements.txt"
|
||||
ERROR_MISSING_READTHEDOCS = "Missing readthedocs.yaml"
|
||||
ERROR_MISSING_PYPROJECT_TOML = "For PyPI compatibility, missing pyproject.toml"
|
||||
ERROR_MISSING_PRE_COMMIT_CONFIG = "Missing .pre-commit-config.yaml"
|
||||
ERROR_MISSING_REQUIREMENTS_TXT = "For PyPI compatibility, missing requirements.txt"
|
||||
ERROR_SETUP_PY_EXISTS = "Library uses setup.py, needs to be converted to pyproject.toml"
|
||||
ERROR_MISSING_OPTIONAL_REQUIREMENTS_TXT = (
|
||||
"For PyPI compatibility, missing optional_requirements.txt"
|
||||
)
|
||||
ERROR_MISSING_BLINKA = (
|
||||
"For pypi compatibility, missing Adafruit-Blinka in requirements.txt"
|
||||
)
|
||||
|
|
@ -123,18 +95,22 @@ ERROR_UNABLE_PULL_REPO_DETAILS = "Unable to pull repo details"
|
|||
ERRRO_UNABLE_PULL_REPO_EXAMPLES = "Unable to retrieve examples folder contents"
|
||||
ERROR_WIKI_DISABLED = "Wiki should be disabled"
|
||||
ERROR_ONLY_ALLOW_MERGES = "Only allow merges, disallow rebase and squash"
|
||||
ERROR_RTD_SUBPROJECT_FAILED = "Failed to list CircuitPython subprojects on ReadTheDocs"
|
||||
ERROR_RTD_SUBPROJECT_MISSING = "ReadTheDocs missing as a subproject on CircuitPython"
|
||||
ERROR_RTD_ADABOT_MISSING = "ReadTheDocs project missing adabot as owner"
|
||||
ERROR_RTD_VALID_VERSIONS_FAILED = "Failed to fetch ReadTheDocs valid versions"
|
||||
ERROR_RTD_FAILED_TO_LOAD_BUILDS = "Unable to load builds webpage"
|
||||
ERROR_RTD_FAILED_TO_LOAD_BUILD_INFO = "Failed to load build info"
|
||||
ERROR_RTD_OUTPUT_HAS_WARNINGS = "ReadTheDocs latest build has warnings and/or errors"
|
||||
ERROR_RTD_AUTODOC_FAILED = (
|
||||
"Autodoc failed on ReadTheDocs. (Likely need to automock an import.)"
|
||||
ERROR_RTD_FAILED_TO_LOAD_BUILD_STATUS = (
|
||||
"Failed to load RTD build status (General error)"
|
||||
)
|
||||
ERROR_RTD_SPHINX_FAILED = "Sphinx missing files"
|
||||
ERROR_GITHUB_RELEASE_FAILED = "Failed to fetch latest release from GitHub"
|
||||
ERROR_RTD_FAILED_TO_LOAD_BUILD_STATUS_GH_NONLIMITED = (
|
||||
"Failed to load RTD build status (GitHub error)"
|
||||
)
|
||||
ERROR_RTD_FAILED_TO_LOAD_BUILD_STATUS_RTD_NONLIMITED = (
|
||||
"Failed to load RTD build status (RTD error)"
|
||||
)
|
||||
ERROR_RTD_FAILED_TO_LOAD_BUILD_STATUS_RTD_UNEXPECTED_RETURN = (
|
||||
"Failed to load RTD build status (Unknown error)"
|
||||
)
|
||||
ERROR_RTD_SUBPROJECT_FAILED = "Failed to list CircuitPython subprojects on ReadTheDocs"
|
||||
ERROR_RTD_OUTPUT_HAS_WARNINGS = "ReadTheDocs latest build has warnings and/or errors"
|
||||
ERROR_GITHUB_NO_RELEASE = "Library repository has no releases"
|
||||
ERROR_GITHUB_COMMITS_SINCE_LAST_RELEASE_GTM = (
|
||||
"Library has new commits since last release over a month ago"
|
||||
|
|
@ -157,9 +133,13 @@ ERROR_DRIVERS_PAGE_DOWNLOAD_MISSING_DRIVER = "CircuitPython drivers page missing
|
|||
ERROR_UNABLE_PULL_REPO_DIR = "Unable to pull repository directory"
|
||||
ERROR_UNABLE_PULL_REPO_EXAMPLES = "Unable to pull repository examples files"
|
||||
ERROR_NOT_ON_PYPI = "Not listed on PyPi for CPython use"
|
||||
ERROR_PYLINT_VERSION_NOT_FIXED = "PyLint version not fixed"
|
||||
ERROR_PYLINT_VERSION_NOT_LATEST = "PyLint version not latest"
|
||||
ERROR_PYLINT_FAILED_LINTING = "Failed PyLint checks"
|
||||
ERROR_BLACK_VERSION = "Missing or incorrect Black version in .pre-commit-config.yaml"
|
||||
ERROR_REUSE_VERSION = "Missing or incorrect REUSE version in .pre-commit-config.yaml"
|
||||
ERROR_PRE_COMMIT_VERSION = (
|
||||
"Missing or incorrect pre-commit version in .pre-commit-config.yaml"
|
||||
)
|
||||
ERROR_PYLINT_VERSION = "Missing or incorrect pylint version in .pre-commit-config.yaml"
|
||||
ERROR_CI_BUILD = "Failed CI build"
|
||||
ERROR_NEW_REPO_IN_WORK = "New repo(s) currently in work, and unreleased"
|
||||
|
||||
# Temp category for GitHub Actions migration.
|
||||
|
|
@ -208,7 +188,16 @@ STD_REPO_LABELS = {
|
|||
"good first issue": {"color": "7057ff"},
|
||||
}
|
||||
|
||||
_TOKEN_FUNCTIONS = []
|
||||
|
||||
|
||||
def uses_token(func):
|
||||
"""Decorator for recording functions that use tokens"""
|
||||
_TOKEN_FUNCTIONS.append(func.__name__)
|
||||
return func
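The decorator above only records method names; a self-contained sketch of the same registration pattern (the function name here is invented, not from adabot):

_TOKEN_FUNCTIONS = []

def uses_token(func):
    """Record the names of functions that need an authenticated client."""
    _TOKEN_FUNCTIONS.append(func.__name__)
    return func

@uses_token
def validate_something_private(repo):
    return []

print(_TOKEN_FUNCTIONS)  # ['validate_something_private']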
|
||||
|
||||
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
class LibraryValidator:
|
||||
"""Class to hold instance variables needed to traverse the calling
|
||||
code, and the validator functions.
|
||||
|
|
@ -221,38 +210,72 @@ class LibraryValidator:
|
|||
self.bundle_submodules = bundle_submodules
|
||||
self.latest_pylint = pkg_version_parse(latest_pylint)
|
||||
self._rtd_yaml_base = None
|
||||
self._pcc_versions = {}
|
||||
self.output_file_data = []
|
||||
self.validate_contents_quiet = kw_args.get("validate_contents_quiet", False)
|
||||
self.has_setup_py_disabled = set()
|
||||
self.has_pyproject_toml_disabled = set()
|
||||
self.keep_repos = keep_repos
|
||||
self.rtd_subprojects = None
|
||||
self.core_driver_page = None
|
||||
|
||||
@property
|
||||
def rtd_yml_base(self):
|
||||
"""The parsed YAML from `.readthedocs.yml` in the cookiecutter-adafruit-circuitpython repo.
|
||||
Used to verify that a library's `.readthedocs.yml` matches this version.
|
||||
"""The parsed YAML from `.readthedocs.yaml` in the cookiecutter-adafruit-circuitpython repo.
|
||||
Used to verify that a library's `.readthedocs.yaml` matches this version.
|
||||
"""
|
||||
if self._rtd_yaml_base is None:
|
||||
rtd_yml_dl_url = (
|
||||
"https://raw.githubusercontent.com/adafruit/cookiecutter-adafruit-"
|
||||
"circuitpython/main/%7B%7B%20cookiecutter%20and%20'tmp_repo'%20%7D"
|
||||
"%7D/%7B%25%20if%20cookiecutter.sphinx_docs%20in%20%5B'y'%2C%20'yes'"
|
||||
"%5D%20%25%7D.readthedocs.yml%7B%25%20endif%20%25%7D"
|
||||
"circuitpython/main/%7B%7B%20cookiecutter.__dirname%20%7D%7D/%7B%25"
|
||||
"%20if%20cookiecutter.sphinx_docs%20in%20%5B'y'%2C%20'yes'%5D%20%25"
|
||||
"%7D.readthedocs.yaml%7B%25%20endif%20%25%7D"
|
||||
)
|
||||
rtd_yml = requests.get(rtd_yml_dl_url)
|
||||
if rtd_yml.ok:
|
||||
try:
|
||||
self._rtd_yaml_base = yaml.safe_load(rtd_yml.text)
|
||||
except yaml.YAMLError:
|
||||
print("Error parsing cookiecutter .readthedocs.yml.")
|
||||
print("Error parsing cookiecutter .readthedocs.yaml.")
|
||||
self._rtd_yaml_base = ""
|
||||
else:
|
||||
print("Error retrieving cookiecutter .readthedocs.yml")
|
||||
print("Error retrieving cookiecutter .readthedocs.yaml")
|
||||
self._rtd_yaml_base = ""
|
||||
|
||||
return self._rtd_yaml_base
|
||||
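The rtd_yml_base property above follows a fetch-once, cache-forever pattern with an empty-string sentinel for failures; a minimal standalone sketch, assuming only requests and PyYAML (the class name and URL are placeholders):

import requests
import yaml

class YamlFetcher:
    """Fetch and parse a remote YAML file once, caching the result."""

    def __init__(self, url):
        self._url = url
        self._parsed = None

    @property
    def parsed(self):
        if self._parsed is None:
            resp = requests.get(self._url, timeout=30)
            if resp.ok:
                try:
                    self._parsed = yaml.safe_load(resp.text)
                except yaml.YAMLError:
                    self._parsed = ""  # sentinel: downloaded but unparseable
            else:
                self._parsed = ""  # sentinel: download failed
        return self._parsed

# fetcher = YamlFetcher("https://example.com/.readthedocs.yaml")  # placeholder URL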
|
||||
@property
|
||||
def pcc_versions(self):
|
||||
"""The parsed YAML from `.pre-commit-config.yaml` in cookiecutter.
|
||||
Used to verify that a library's `.pre-commit-config.yaml` matches this.
|
||||
"""
|
||||
if not self._pcc_versions:
|
||||
pcc_yml_dl_url = (
|
||||
"https://raw.githubusercontent.com/adafruit/cookiecutter-adafruit-"
|
||||
"circuitpython/main/%7B%7B%20cookiecutter.__dirname%20%7D%7D/.pre-"
|
||||
"commit-config.yaml"
|
||||
)
|
||||
pcc_yml = requests.get(pcc_yml_dl_url)
|
||||
if pcc_yml.ok:
|
||||
try:
|
||||
pcc_yaml_base = yaml.safe_load(pcc_yml.text)
|
||||
except yaml.YAMLError:
|
||||
print("Error parsing cookiecutter .pre-commit-config.yaml.")
|
||||
pcc_yaml_base = ""
|
||||
else:
|
||||
print("Error retrieving cookiecutter .pre-commit-config.yaml")
|
||||
pcc_yaml_base = ""
|
||||
|
||||
for i in pcc_yaml_base["repos"]:
|
||||
self._pcc_versions[i["repo"]] = i["rev"]
|
||||
|
||||
return self._pcc_versions
|
||||
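pcc_versions boils the cookiecutter's .pre-commit-config.yaml down to a {repo_url: rev} mapping; the same extraction can be exercised on an in-memory snippet (the pins below are examples, not necessarily the current ones):

import yaml

sample = """
repos:
  - repo: https://github.com/python/black
    rev: 22.3.0
  - repo: https://github.com/pycqa/pylint
    rev: v2.15.5
"""
versions = {entry["repo"]: entry["rev"] for entry in yaml.safe_load(sample)["repos"]}
print(versions)
# {'https://github.com/python/black': '22.3.0', 'https://github.com/pycqa/pylint': 'v2.15.5'}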
|
||||
@staticmethod
|
||||
def get_token_methods():
|
||||
"""Return a list of method names that require authentication"""
|
||||
|
||||
return _TOKEN_FUNCTIONS
|
||||
|
||||
def run_repo_validation(self, repo):
|
||||
"""Run all the current validation functions on the provided repository and
|
||||
return their results as a list of string errors.
|
||||
|
|
@ -289,7 +312,7 @@ class LibraryValidator:
|
|||
if repo_missing_some_keys:
|
||||
# only call the API if the passed in `repo` doesn't have what
|
||||
# we need.
|
||||
response = github.get("/repos/" + repo["full_name"])
|
||||
response = gh_reqs.get("/repos/" + repo["full_name"])
|
||||
if not response.ok:
|
||||
return [ERROR_UNABLE_PULL_REPO_DETAILS]
|
||||
repo_fields = response.json()
|
||||
|
|
@ -322,37 +345,13 @@ class LibraryValidator:
|
|||
errors.append(ERROR_ONLY_ALLOW_MERGES)
|
||||
return errors
|
||||
|
||||
def validate_actions_state(self, repo):
|
||||
"""Validate if the most recent GitHub Actions run on the default branch
|
||||
has passed.
|
||||
Just returns a message stating that the most recent run failed.
|
||||
"""
|
||||
if not (
|
||||
repo["owner"]["login"] == "adafruit"
|
||||
and repo["name"].startswith("Adafruit_CircuitPython")
|
||||
):
|
||||
return []
|
||||
|
||||
actions_params = {"branch": repo["default_branch"]}
|
||||
response = github.get(
|
||||
"/repos/" + repo["full_name"] + "/actions/runs", params=actions_params
|
||||
)
|
||||
|
||||
if not response.ok:
|
||||
return [ERROR_UNABLE_PULL_REPO_DETAILS]
|
||||
|
||||
workflow_runs = response.json()["workflow_runs"]
|
||||
if workflow_runs and workflow_runs[0]["conclusion"] == "failure":
|
||||
return [ERROR_GITHUB_FAILING_ACTIONS]
|
||||
return []
|
||||
|
||||
# pylint: disable=too-many-locals,too-many-return-statements,too-many-branches
|
||||
def validate_release_state(self, repo):
|
||||
"""Validate if a repo 1) has a release, and 2) if there have been commits
|
||||
since the last release. Only files that drive user-facing changes
|
||||
will be considered when flagging a repo as needing a release.
|
||||
|
||||
If 2), categorize by length of time passed since oldest commit after the release,
|
||||
If 2), categorize by length of time passed since oldest commit after the release,
|
||||
and return the number of days that have passed since the oldest commit.
|
||||
"""
|
||||
|
||||
|
|
@ -362,13 +361,12 @@ class LibraryValidator:
|
|||
"LICENSE",
|
||||
"LICENSES/*",
|
||||
"*.license",
|
||||
"setup.py.disabled",
|
||||
"pyproject.toml.disabled",
|
||||
".github/workflows/build.yml",
|
||||
".github/workflows/release.yml",
|
||||
".pre-commit-config.yaml",
|
||||
".pylintrc",
|
||||
".gitignore",
|
||||
"CODE_OF_CONDUCT.md",
|
||||
"README.rst",
|
||||
"pyproject.toml",
|
||||
}
|
||||
|
|
@ -385,7 +383,7 @@ class LibraryValidator:
|
|||
if repo["name"] in BUNDLE_IGNORE_LIST:
|
||||
return []
|
||||
|
||||
repo_last_release = github.get(
|
||||
repo_last_release = gh_reqs.get(
|
||||
"/repos/" + repo["full_name"] + "/releases/latest"
|
||||
)
|
||||
if not repo_last_release.ok:
|
||||
|
|
@ -404,7 +402,7 @@ class LibraryValidator:
|
|||
|
||||
tag_name = repo_release_json.get("tag_name", "")
|
||||
main_branch = repo["default_branch"]
|
||||
compare_tags = github.get(
|
||||
compare_tags = gh_reqs.get(
|
||||
f"/repos/{repo['full_name']}/compare/{tag_name}...{main_branch}"
|
||||
)
|
||||
if not compare_tags.ok:
|
||||
|
|
@ -415,7 +413,6 @@ class LibraryValidator:
|
|||
compare_tags_json = compare_tags.json()
|
||||
if "status" in compare_tags_json:
|
||||
if compare_tags_json["status"] != "identical":
|
||||
|
||||
filtered_files = _filter_file_diffs(
|
||||
[file["filename"] for file in compare_tags_json.get("files")]
|
||||
)
|
||||
|
|
@ -556,44 +553,53 @@ class LibraryValidator:
|
|||
|
||||
errors = []
|
||||
|
||||
pylint_version = None
|
||||
re_pip_pattern = r"pip\sinstall.*"
|
||||
re_pylint_pattern = r"(?P<pylint>pylint(?:[<>~=]){0,2}\d*(?:\.\d){0,2})"
|
||||
|
||||
pip_line = re.search(re_pip_pattern, contents.text)
|
||||
if not pip_line:
|
||||
return [ERROR_PYLINT_VERSION_NOT_FIXED]
|
||||
|
||||
pip_line = pip_line[0]
|
||||
|
||||
pylint_info = re.search(re_pylint_pattern, pip_line)
|
||||
if not pylint_info or not pylint_info.group("pylint"):
|
||||
return [ERROR_PYLINT_VERSION_NOT_FIXED]
|
||||
|
||||
try:
|
||||
pylint_version = Requirement(pylint_info.group("pylint"))
|
||||
except InvalidRequirement:
|
||||
pass
|
||||
|
||||
if not pylint_version:
|
||||
errors.append(ERROR_PYLINT_VERSION_NOT_FIXED)
|
||||
elif self.latest_pylint not in pylint_version.specifier:
|
||||
errors.append(ERROR_PYLINT_VERSION_NOT_LATEST)
|
||||
|
||||
return errors
|
||||
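The pin check above is a regex scan followed by packaging's Requirement parsing; a self-contained sketch against a made-up documentation snippet (the 2.7.1 version is illustrative only):

import re
from packaging.requirements import InvalidRequirement, Requirement

doc_text = "Install the tools first:\n    pip install pylint==2.7.1 Sphinx\n"
latest_pylint = "2.7.1"

pip_line = re.search(r"pip\sinstall.*", doc_text)[0]
pylint_info = re.search(r"(?P<pylint>pylint(?:[<>~=]){0,2}\d*(?:\.\d){0,2})", pip_line)
try:
    requirement = Requirement(pylint_info.group("pylint")) if pylint_info else None
except InvalidRequirement:
    requirement = None

if requirement is None:
    print("pylint version not fixed")
elif latest_pylint not in requirement.specifier:
    print("pylint version fixed, but not latest")
else:
    print("pylint pinned to the latest version")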
|
||||
def _validate_setup_py(self, file_info):
|
||||
"""Check setup.py for pypi compatibility"""
|
||||
def _validate_pre_commit_config_yaml(self, file_info):
|
||||
download_url = file_info["download_url"]
|
||||
contents = requests.get(download_url, timeout=30)
|
||||
if not contents.ok:
|
||||
return [ERROR_PYFILE_DOWNLOAD_FAILED]
|
||||
|
||||
text = contents.text
|
||||
|
||||
errors = []
|
||||
|
||||
black_repo = "repo: https://github.com/python/black"
|
||||
black_version = "rev: 22.3.0"
|
||||
|
||||
if black_repo not in text or black_version not in text:
|
||||
errors.append(ERROR_BLACK_VERSION)
|
||||
|
||||
reuse_repo = "repo: https://github.com/fsfe/reuse-tool"
|
||||
reuse_version = "rev: v0.14.0"
|
||||
|
||||
if reuse_repo not in text or reuse_version not in text:
|
||||
errors.append(ERROR_REUSE_VERSION)
|
||||
|
||||
pc_repo = "repo: https://github.com/pre-commit/pre-commit-hooks"
|
||||
pc_version = "rev: v4.2.0"
|
||||
|
||||
if pc_repo not in text or pc_version not in text:
|
||||
errors.append(ERROR_PRE_COMMIT_VERSION)
|
||||
|
||||
pylint_repo = "repo: https://github.com/pycqa/pylint"
|
||||
pylint_version = "rev: v2.15.5"
|
||||
|
||||
if pylint_repo not in text or pylint_version not in text:
|
||||
errors.append(ERROR_PYLINT_VERSION)
|
||||
|
||||
return errors
|
||||
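_validate_pre_commit_config_yaml works by plain substring checks against the downloaded file text; the same idea on an in-memory config (the expected pins here are examples only):

expected_pins = {
    "repo: https://github.com/python/black": "rev: 22.3.0",
    "repo: https://github.com/pycqa/pylint": "rev: v2.15.5",
}
config_text = """\
repos:
  - repo: https://github.com/python/black
    rev: 22.3.0
"""
errors = [
    f"missing or outdated pin for {repo_line}"
    for repo_line, rev_line in expected_pins.items()
    if repo_line not in config_text or rev_line not in config_text
]
print(errors)  # flags pylint, since its repo/rev pin is absent from config_text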
|
||||
def _validate_requirements_txt(self, repo, file_info):
|
||||
def _validate_pyproject_toml(self, file_info):
|
||||
"""Check pyproject.toml for pypi compatibility"""
|
||||
download_url = file_info["download_url"]
|
||||
contents = requests.get(download_url, timeout=30)
|
||||
if not contents.ok:
|
||||
return [ERROR_TOMLFILE_DOWNLOAD_FAILED]
|
||||
return []
|
||||
|
||||
def _validate_requirements_txt(self, repo, file_info, check_blinka=True):
|
||||
"""Check requirements.txt for pypi compatibility"""
|
||||
download_url = file_info["download_url"]
|
||||
contents = requests.get(download_url, timeout=30)
|
||||
|
|
@ -604,7 +610,11 @@ class LibraryValidator:
|
|||
lines = contents.text.split("\n")
|
||||
blinka_lines = [l for l in lines if re.match(r"[\s]*Adafruit-Blinka[\s]*", l)]
|
||||
|
||||
if not blinka_lines and repo["name"] not in LIBRARIES_DONT_NEED_BLINKA:
|
||||
if (
|
||||
not blinka_lines
|
||||
and repo["name"] not in LIBRARIES_DONT_NEED_BLINKA
|
||||
and check_blinka
|
||||
):
|
||||
errors.append(ERROR_MISSING_BLINKA)
|
||||
return errors
|
||||
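The new check_blinka flag only gates the final append; the scan itself stays a one-line regex filter, shown here standalone on an in-memory requirements.txt:

import re

requirements_text = "Adafruit-Blinka\nadafruit-circuitpython-busdevice\n"
blinka_lines = [
    l for l in requirements_text.split("\n")
    if re.match(r"[\s]*Adafruit-Blinka[\s]*", l)
]
print("Blinka requirement present" if blinka_lines else "Adafruit-Blinka missing")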
|
||||
|
|
@ -624,7 +634,7 @@ class LibraryValidator:
|
|||
if repo["name"] == BUNDLE_REPO_NAME:
|
||||
return []
|
||||
|
||||
content_list = github.get("/repos/" + repo["full_name"] + "/contents/")
|
||||
content_list = gh_reqs.get("/repos/" + repo["full_name"] + "/contents/")
|
||||
empty_repo = False
|
||||
if not content_list.ok:
|
||||
# Empty repos return:
|
||||
|
|
@ -645,18 +655,18 @@ class LibraryValidator:
|
|||
files = [x["name"] for x in content_list]
|
||||
|
||||
# ignore new/in-work repos, which should have less than 8 files:
|
||||
# ___.py or folder, CoC, .github/, .readthedocs.yml, docs/,
|
||||
# ___.py or folder, CoC, .github/, .readthedocs.yaml, docs/,
|
||||
# examples/, README, LICENSE
|
||||
if len(files) < 8:
|
||||
BUNDLE_IGNORE_LIST.append(repo["name"])
|
||||
if not self.validate_contents_quiet:
|
||||
return [ERROR_NEW_REPO_IN_WORK]
|
||||
|
||||
if "setup.py.disabled" in files:
|
||||
self.has_setup_py_disabled.add(repo["name"])
|
||||
if "pyproject.toml.disabled" in files:
|
||||
self.has_pyproject_toml_disabled.add(repo["name"])
|
||||
|
||||
# if we're only running due to -v, ignore the rest. we only care about
|
||||
# adding in-work repos to the BUNDLE_IGNORE_LIST and if setup.py is
|
||||
# adding in-work repos to the BUNDLE_IGNORE_LIST and if pyproject.toml is
|
||||
# disabled
|
||||
if self.validate_contents_quiet:
|
||||
return []
|
||||
|
|
@ -692,7 +702,7 @@ class LibraryValidator:
|
|||
|
||||
if build_yml_url:
|
||||
build_yml_url = build_yml_url + "/workflows/build.yml"
|
||||
response = github.get(build_yml_url)
|
||||
response = gh_reqs.get(build_yml_url)
|
||||
if response.ok:
|
||||
actions_build_info = response.json()
|
||||
|
||||
|
|
@ -701,11 +711,11 @@ class LibraryValidator:
|
|||
else:
|
||||
errors.append(ERROR_UNABLE_PULL_REPO_CONTENTS)
|
||||
|
||||
if "readthedocs.yml" in files or ".readthedocs.yml" in files:
|
||||
if "readthedocs.yaml" in files or ".readthedocs.yaml" in files:
|
||||
if self.rtd_yml_base != "":
|
||||
filename = "readthedocs.yml"
|
||||
if ".readthedocs.yml" in files:
|
||||
filename = ".readthedocs.yml"
|
||||
filename = "readthedocs.yaml"
|
||||
if ".readthedocs.yaml" in files:
|
||||
filename = ".readthedocs.yaml"
|
||||
file_info = content_list[files.index(filename)]
|
||||
rtd_contents = requests.get(file_info["download_url"])
|
||||
if rtd_contents.ok:
|
||||
|
|
@ -715,24 +725,57 @@ class LibraryValidator:
|
|||
errors.append(ERROR_MISMATCHED_READTHEDOCS)
|
||||
except yaml.YAMLError:
|
||||
self.output_file_data.append(
|
||||
"Error parsing {} .readthedocs.yml.".format(repo["name"])
|
||||
"Error parsing {} .readthedocs.yaml.".format(repo["name"])
|
||||
)
|
||||
errors.append(ERROR_OUTPUT_HANDLER)
|
||||
else:
|
||||
errors.append(ERROR_MISSING_READTHEDOCS)
|
||||
|
||||
if "setup.py" in files:
|
||||
file_info = content_list[files.index("setup.py")]
|
||||
errors.extend(self._validate_setup_py(file_info))
|
||||
elif "setup.py.disabled" not in files:
|
||||
errors.append(ERROR_MISSING_SETUP_PY)
|
||||
if ".pre-commit-config.yaml" in files:
|
||||
if len(self._pcc_versions) or self.pcc_versions != "":
|
||||
filename = ".pre-commit-config.yaml"
|
||||
file_info = content_list[files.index(filename)]
|
||||
pcc_contents = requests.get(file_info["download_url"])
|
||||
if pcc_contents.ok:
|
||||
try:
|
||||
pcc_yml = yaml.safe_load(pcc_contents.text)
|
||||
pcc_versions = {}
|
||||
for i in pcc_yml["repos"]:
|
||||
pcc_versions[i["repo"]] = i["rev"]
|
||||
if self._pcc_versions != pcc_versions:
|
||||
errors.append(ERROR_MISMATCHED_PRE_COMMIT_CONFIG)
|
||||
except yaml.YAMLError:
|
||||
self.output_file_data.append(
|
||||
"Error parsing {} .pre-commit-config.yaml.".format(
|
||||
repo["name"]
|
||||
)
|
||||
)
|
||||
errors.append(ERROR_OUTPUT_HANDLER)
|
||||
else:
|
||||
errors.append(ERROR_MISSING_PRE_COMMIT_CONFIG)
|
||||
|
||||
if repo["name"] not in self.has_setup_py_disabled:
|
||||
if "pyproject.toml" in files:
|
||||
file_info = content_list[files.index("pyproject.toml")]
|
||||
errors.extend(self._validate_pyproject_toml(file_info))
|
||||
else:
|
||||
errors.append(ERROR_MISSING_PYPROJECT_TOML)
|
||||
|
||||
if "setup.py" in files:
|
||||
errors.append(ERROR_SETUP_PY_EXISTS)
|
||||
|
||||
if repo["name"] not in self.has_pyproject_toml_disabled:
|
||||
if "requirements.txt" in files:
|
||||
file_info = content_list[files.index("requirements.txt")]
|
||||
errors.extend(self._validate_requirements_txt(repo, file_info))
|
||||
else:
|
||||
errors.append(ERROR_MISSING_REQUIREMENTS_TXT)
|
||||
if "optional_requirements.txt" in files:
|
||||
file_info = content_list[files.index("optional_requirements.txt")]
|
||||
errors.extend(
|
||||
self._validate_requirements_txt(repo, file_info, check_blinka=False)
|
||||
)
|
||||
else:
|
||||
errors.append(ERROR_MISSING_OPTIONAL_REQUIREMENTS_TXT)
|
||||
|
||||
# Check for an examples folder.
|
||||
dirs = [
|
||||
|
|
@ -745,7 +788,7 @@ class LibraryValidator:
|
|||
while dirs:
|
||||
# loop through the results to ensure we capture files
|
||||
# in subfolders, and add any files in the current directory
|
||||
result = github.get(dirs.pop(0))
|
||||
result = gh_reqs.get(dirs.pop(0))
|
||||
if not result.ok:
|
||||
errors.append(ERROR_UNABLE_PULL_REPO_EXAMPLES)
|
||||
break
|
||||
|
|
@ -758,16 +801,17 @@ class LibraryValidator:
|
|||
else:
|
||||
|
||||
def __check_lib_name(
|
||||
repo_name, file_name
|
||||
repo_name,
|
||||
file_name,
|
||||
): # pylint: disable=unused-private-member
|
||||
"""Nested function to test example file names.
|
||||
Allows examples to either match the repo name,
|
||||
or have additional underscores separating the repo name.
|
||||
"""
|
||||
file_names = set()
|
||||
file_names.add(file_name)
|
||||
file_names.add(file_name[9:])
|
||||
|
||||
name_split = file_name.split("_")
|
||||
name_split = file_name[9:].split("_")
|
||||
name_rebuilt = "".join(
|
||||
(part for part in name_split if ".py" not in part)
|
||||
)
|
||||
|
|
@ -785,7 +829,7 @@ class LibraryValidator:
|
|||
for example in examples_list:
|
||||
if example["name"].endswith(".py"):
|
||||
check_lib_name = __check_lib_name(
|
||||
lib_name, example["name"].lower()
|
||||
lib_name, example["path"].lower()
|
||||
)
|
||||
if not check_lib_name:
|
||||
all_have_name = False
|
||||
|
|
@ -812,7 +856,7 @@ class LibraryValidator:
|
|||
for adir in dirs:
|
||||
if re_str.fullmatch(adir):
|
||||
# retrieve the files in that directory
|
||||
dir_file_list = github.get(
|
||||
dir_file_list = gh_reqs.get(
|
||||
"/repos/" + repo["full_name"] + "/contents/" + adir
|
||||
)
|
||||
if not dir_file_list.ok:
|
||||
|
|
@ -831,8 +875,10 @@ class LibraryValidator:
|
|||
|
||||
return errors
|
||||
|
||||
@uses_token
|
||||
def validate_readthedocs(self, repo):
|
||||
"""Method to check the health of `repo`'s ReadTheDocs."""
|
||||
"""Method to check the status of `repo`'s ReadTheDocs."""
|
||||
|
||||
if not (
|
||||
repo["owner"]["login"] == "adafruit"
|
||||
and repo["name"].startswith("Adafruit_CircuitPython")
|
||||
|
|
@ -851,7 +897,6 @@ class LibraryValidator:
|
|||
self.rtd_subprojects[
|
||||
common_funcs.sanitize_url(subproject["repo"])
|
||||
] = subproject
|
||||
|
||||
repo_url = common_funcs.sanitize_url(repo["clone_url"])
|
||||
if repo_url not in self.rtd_subprojects:
|
||||
return [ERROR_RTD_SUBPROJECT_MISSING]
|
||||
|
|
@ -862,81 +907,55 @@ class LibraryValidator:
|
|||
if 105398 not in subproject["users"]:
|
||||
errors.append(ERROR_RTD_ADABOT_MISSING)
|
||||
|
||||
valid_versions = requests.get(
|
||||
"https://readthedocs.org/api/v2/project/{}/active_versions/".format(
|
||||
subproject["id"]
|
||||
),
|
||||
timeout=15,
|
||||
)
|
||||
if not valid_versions.ok:
|
||||
errors.append(ERROR_RTD_VALID_VERSIONS_FAILED)
|
||||
else:
|
||||
valid_versions = valid_versions.json()
|
||||
latest_release = github.get(
|
||||
"/repos/{}/releases/latest".format(repo["full_name"])
|
||||
)
|
||||
if not latest_release.ok:
|
||||
errors.append(ERROR_GITHUB_RELEASE_FAILED)
|
||||
# disabling this for now, since it is ignored and always fails
|
||||
# else:
|
||||
# if (
|
||||
# latest_release.json()["tag_name"] not in
|
||||
# [tag["verbose_name"] for tag in valid_versions["versions"]]
|
||||
# ):
|
||||
# errors.append(ERROR_RTD_MISSING_LATEST_RELEASE)
|
||||
# Get the README file contents
|
||||
try:
|
||||
lib_repo = GH_INTERFACE.get_repo(repo["full_name"])
|
||||
content_file = lib_repo.get_contents("README.rst")
|
||||
except pygithub.GithubException:
|
||||
errors.append(ERROR_RTD_FAILED_TO_LOAD_BUILD_STATUS_GH_NONLIMITED)
|
||||
return errors
|
||||
|
||||
# There is no API which gives access to a list of builds for a project so we parse the html
|
||||
# webpage.
|
||||
builds_webpage = requests.get(
|
||||
"https://readthedocs.org/projects/{}/builds/".format(subproject["slug"]),
|
||||
timeout=15,
|
||||
)
|
||||
# pylint: disable=too-many-nested-blocks
|
||||
# TODO: look into reducing the number of nested blocks.
|
||||
if not builds_webpage.ok:
|
||||
errors.append(ERROR_RTD_FAILED_TO_LOAD_BUILDS)
|
||||
else:
|
||||
for line in builds_webpage.text.split("\n"):
|
||||
if '<div id="build-' in line:
|
||||
build_id = line.split('"')[1][len("build-") :]
|
||||
# We only validate the most recent, latest build. So, break when the first "version
|
||||
# latest" found. Its in the page after the build id.
|
||||
if "version latest" in line:
|
||||
break
|
||||
build_info = requests.get(
|
||||
"https://readthedocs.org/api/v2/build/{}/".format(build_id), timeout=15
|
||||
)
|
||||
if not build_info.ok:
|
||||
errors.append(ERROR_RTD_FAILED_TO_LOAD_BUILD_INFO)
|
||||
else:
|
||||
build_info = build_info.json()
|
||||
output_ok = True
|
||||
autodoc_ok = True
|
||||
sphinx_ok = True
|
||||
for command in build_info["commands"]:
|
||||
if command["command"].endswith("_build/html"):
|
||||
for line in command["output"].split("\n"):
|
||||
if "... " in line:
|
||||
_, line = line.split("... ")
|
||||
if "WARNING" in line or "ERROR" in line:
|
||||
if not line.startswith(("WARNING", "ERROR")):
|
||||
line = line.split(" ", 1)[1]
|
||||
if not line.startswith(RTD_IGNORE_NOTICES):
|
||||
output_ok = False
|
||||
elif line.startswith("ImportError"):
|
||||
autodoc_ok = False
|
||||
elif line.startswith("sphinx.errors") or line.startswith(
|
||||
"SphinxError"
|
||||
):
|
||||
sphinx_ok = False
|
||||
break
|
||||
if not output_ok:
|
||||
errors.append(ERROR_RTD_OUTPUT_HAS_WARNINGS)
|
||||
if not autodoc_ok:
|
||||
errors.append(ERROR_RTD_AUTODOC_FAILED)
|
||||
if not sphinx_ok:
|
||||
errors.append(ERROR_RTD_SPHINX_FAILED)
|
||||
readme_text = content_file.decoded_content.decode("utf-8")
|
||||
|
||||
# Parse for the ReadTheDocs slug
|
||||
search_results: parse.Result = parse.search(
|
||||
"https://readthedocs.org/projects/{slug:S}/badge", readme_text
|
||||
)
|
||||
rtd_slug: str = search_results.named["slug"]
|
||||
rtd_slug = rtd_slug.replace("_", "-", -1)
|
||||
|
||||
while True:
|
||||
# GET the latest documentation build runs
|
||||
url = f"https://readthedocs.org/api/v3/projects/{rtd_slug}/builds/"
|
||||
rtd_token = os.environ["RTD_TOKEN"]
|
||||
headers = {"Authorization": f"token {rtd_token}"}
|
||||
response = requests.get(url, headers=headers)
|
||||
json_response = response.json()
|
||||
|
||||
error_message = json_response.get("detail")
|
||||
if error_message:
|
||||
if error_message == "Not found." or not error_message.startswith(
|
||||
"Request was throttled."
|
||||
):
|
||||
errors.append(ERROR_RTD_FAILED_TO_LOAD_BUILD_STATUS_RTD_NONLIMITED)
|
||||
return errors
|
||||
time_result = parse.search(
|
||||
"Request was throttled. Expected available in {throttled:d} seconds.",
|
||||
error_message,
|
||||
)
|
||||
time.sleep(time_result.named["throttled"] + 3)
|
||||
continue
|
||||
break
|
||||
|
||||
# Return the results of the latest run
|
||||
doc_build_results = json_response.get("results")
|
||||
if doc_build_results is None:
|
||||
errors.append(ERROR_RTD_FAILED_TO_LOAD_BUILD_STATUS_RTD_UNEXPECTED_RETURN)
|
||||
return errors
|
||||
result = doc_build_results[0].get("success")
|
||||
time.sleep(3)
|
||||
if not result:
|
||||
errors.append(ERROR_RTD_OUTPUT_HAS_WARNINGS)
|
||||
return errors
|
||||
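The replacement validate_readthedocs flow above polls the ReadTheDocs v3 builds API with a token and backs off when throttled; a hedged standalone sketch, assuming the requests and parse packages and an RTD_TOKEN environment variable (the slug is a placeholder):

import os
import time

import parse
import requests

def latest_build_succeeded(rtd_slug):
    """Return True if the most recent ReadTheDocs build for rtd_slug succeeded."""
    url = f"https://readthedocs.org/api/v3/projects/{rtd_slug}/builds/"
    headers = {"Authorization": f"token {os.environ['RTD_TOKEN']}"}
    while True:
        payload = requests.get(url, headers=headers, timeout=30).json()
        detail = payload.get("detail")
        if not detail:
            break
        if not detail.startswith("Request was throttled."):
            raise RuntimeError(detail)  # e.g. "Not found."
        wait = parse.search("Expected available in {throttled:d} seconds.", detail)
        time.sleep((wait.named["throttled"] if wait else 10) + 3)
    results = payload.get("results") or []
    return bool(results and results[0].get("success"))

# latest_build_succeeded("adafruit-circuitpython-lis3dh")  # placeholder slug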
|
||||
def validate_core_driver_page(self, repo):
|
||||
|
|
@ -962,9 +981,7 @@ class LibraryValidator:
|
|||
|
||||
repo_short_name = repo["name"][len("Adafruit_CircuitPython_") :].lower()
|
||||
full_url = (
|
||||
"https://circuitpython.readthedocs.io/projects/"
|
||||
+ repo_short_name
|
||||
+ "/en/latest/"
|
||||
"https://docs.circuitpython.org/projects/" + repo_short_name + "/en/latest/"
|
||||
)
|
||||
full_url_dashes = full_url.replace("_", "-")
|
||||
if (
|
||||
|
|
@ -978,7 +995,7 @@ class LibraryValidator:
|
|||
def github_get_all_pages(self, url, params):
|
||||
"""Retrieves all paginated results from the GitHub `url`."""
|
||||
results = []
|
||||
response = github.get(url, params=params)
|
||||
response = gh_reqs.get(url, params=params)
|
||||
|
||||
if not response.ok:
|
||||
self.output_file_data.append(f"Github request failed: {url}")
|
||||
|
|
@ -988,7 +1005,7 @@ class LibraryValidator:
|
|||
results.extend(response.json())
|
||||
|
||||
if response.links.get("next"):
|
||||
response = github.get(response.links["next"]["url"])
|
||||
response = gh_reqs.get(response.links["next"]["url"])
|
||||
else:
|
||||
break
|
||||
|
||||
|
|
@ -1021,12 +1038,13 @@ class LibraryValidator:
|
|||
issue["created_at"], "%Y-%m-%dT%H:%M:%SZ"
|
||||
)
|
||||
if "pull_request" in issue:
|
||||
pr_info = github.get(issue["pull_request"]["url"])
|
||||
pr_info = gh_reqs.get(issue["pull_request"]["url"])
|
||||
pr_info = pr_info.json()
|
||||
if issue["state"] == "open":
|
||||
if created > since:
|
||||
insights["new_prs"] += 1
|
||||
insights["pr_authors"].add(pr_info["user"]["login"])
|
||||
if pr_info["user"]:
|
||||
insights["pr_authors"].add(pr_info["user"]["login"])
|
||||
insights["active_prs"] += 1
|
||||
else:
|
||||
merged = datetime.datetime.strptime(
|
||||
|
|
@ -1058,7 +1076,7 @@ class LibraryValidator:
|
|||
|
||||
pr_author = pr_info["user"]["login"]
|
||||
if pr_author == "weblate":
|
||||
pr_commits = github.get(str(pr_info["url"]) + "/commits")
|
||||
pr_commits = gh_reqs.get(str(pr_info["url"]) + "/commits")
|
||||
if pr_commits.ok:
|
||||
for commit in pr_commits.json():
|
||||
author = commit.get("author")
|
||||
|
|
@ -1070,27 +1088,32 @@ class LibraryValidator:
|
|||
insights["pr_merged_authors"].add(pr_info["user"]["login"])
|
||||
|
||||
insights["pr_reviewers"].add(pr_info["merged_by"]["login"])
|
||||
pr_reviews = github.get(str(pr_info["url"]) + "/reviews")
|
||||
pr_reviews = gh_reqs.get(str(pr_info["url"]) + "/reviews")
|
||||
if pr_reviews.ok:
|
||||
for review in pr_reviews.json():
|
||||
if review["state"].lower() == "approved":
|
||||
if (
|
||||
review["state"].lower() == "approved"
|
||||
and review["user"]
|
||||
):
|
||||
insights["pr_reviewers"].add(
|
||||
review["user"]["login"]
|
||||
)
|
||||
else:
|
||||
insights["closed_prs"] += 1
|
||||
else:
|
||||
issue_info = github.get(issue["url"])
|
||||
issue_info = gh_reqs.get(issue["url"])
|
||||
issue_info = issue_info.json()
|
||||
if issue["state"] == "open":
|
||||
if created > since:
|
||||
insights["new_issues"] += 1
|
||||
insights["issue_authors"].add(issue_info["user"]["login"])
|
||||
if issue_info["user"]:
|
||||
insights["issue_authors"].add(issue_info["user"]["login"])
|
||||
insights["active_issues"] += 1
|
||||
|
||||
else:
|
||||
insights["closed_issues"] += 1
|
||||
insights["issue_closers"].add(issue_info["closed_by"]["login"])
|
||||
if issue_info["closed_by"]:
|
||||
insights["issue_closers"].add(issue_info["closed_by"]["login"])
|
||||
|
||||
params = {"state": "open", "per_page": 100}
|
||||
issues = self.github_get_all_pages(
|
||||
|
|
@ -1107,9 +1130,14 @@ class LibraryValidator:
|
|||
if days_open.days < 0: # opened earlier today
|
||||
days_open += datetime.timedelta(days=(days_open.days * -1))
|
||||
if "pull_request" in issue:
|
||||
pull_request = gh_reqs.get(
|
||||
f"/repos/{repo['full_name']}/pulls/{issue['number']}"
|
||||
).json()
|
||||
pr_link = "{0} (Open {1} days)".format(
|
||||
issue["pull_request"]["html_url"], days_open.days
|
||||
)
|
||||
if pull_request["draft"]:
|
||||
pr_link += " (draft)"
|
||||
insights["open_prs"].append(pr_link)
|
||||
else:
|
||||
issue_link = "{0} (Open {1} days)".format(
|
||||
|
|
@ -1139,7 +1167,7 @@ class LibraryValidator:
|
|||
# get milestones for core repo
|
||||
if repo["name"] == "circuitpython":
|
||||
params = {"state": "open"}
|
||||
response = github.get(
|
||||
response = gh_reqs.get(
|
||||
"/repos/adafruit/circuitpython/milestones", params=params
|
||||
)
|
||||
if not response.ok:
|
||||
|
|
@ -1155,7 +1183,7 @@ class LibraryValidator:
|
|||
"""prints a list of Adafruit_CircuitPython libraries that are in pypi"""
|
||||
if (
|
||||
repo["name"] in BUNDLE_IGNORE_LIST
|
||||
or repo["name"] in self.has_setup_py_disabled
|
||||
or repo["name"] in self.has_pyproject_toml_disabled
|
||||
):
|
||||
return []
|
||||
if not (
|
||||
|
|
@ -1169,7 +1197,7 @@ class LibraryValidator:
|
|||
|
||||
def validate_labels(self, repo):
|
||||
"""ensures the repo has the standard labels available"""
|
||||
response = github.get("/repos/" + repo["full_name"] + "/labels")
|
||||
response = gh_reqs.get("/repos/" + repo["full_name"] + "/labels")
|
||||
if not response.ok:
|
||||
# replace 'output_handler' with ERROR_OUTPUT_HANDLER
|
||||
self.output_file_data.append(
|
||||
|
|
@ -1184,7 +1212,7 @@ class LibraryValidator:
|
|||
has_all_labels = True
|
||||
for label, info in STD_REPO_LABELS.items():
|
||||
if not label in repo_labels:
|
||||
response = github.post(
|
||||
response = gh_reqs.post(
|
||||
"/repos/" + repo["full_name"] + "/labels",
|
||||
json={"name": label, "color": info["color"]},
|
||||
)
|
||||
|
|
@ -1203,70 +1231,36 @@ class LibraryValidator:
|
|||
|
||||
return errors
|
||||
|
||||
def validate_passes_linting(self, repo):
|
||||
"""Clones the repo and runs pylint on the Python files"""
|
||||
@uses_token
|
||||
def validate_actions_state(self, repo):
|
||||
"""Validate if the most recent GitHub Actions run on the default branch
|
||||
has passed.
|
||||
Just returns a message stating that the most recent run failed.
|
||||
"""
|
||||
|
||||
if not repo["name"].startswith("Adafruit_CircuitPython"):
|
||||
return []
|
||||
|
||||
ignored_py_files = ["setup.py", "conf.py"]
|
||||
lib_repo = GH_INTERFACE.get_repo(repo["full_name"])
|
||||
|
||||
desination_type = TemporaryDirectory
|
||||
if self.keep_repos:
|
||||
desination_type = pathlib.Path("repos").absolute
|
||||
if lib_repo.archived:
|
||||
return []
|
||||
|
||||
with desination_type() as tempdir:
|
||||
repo_dir = pathlib.Path(tempdir) / repo["name"]
|
||||
try:
|
||||
if not repo_dir.exists():
|
||||
git.clone("--depth=1", repo["git_url"], repo_dir)
|
||||
except sh.ErrorReturnCode as err:
|
||||
self.output_file_data.append(
|
||||
f"Failed to clone repo for linting: {repo['full_name']}\n {err.stderr}"
|
||||
)
|
||||
return [ERROR_OUTPUT_HANDLER]
|
||||
|
||||
if self.keep_repos and (repo_dir / ".pylint-ok").exists():
|
||||
return []
|
||||
|
||||
for file in repo_dir.rglob("*.py"):
|
||||
if file.name in ignored_py_files or str(file.parent).endswith(
|
||||
"examples"
|
||||
):
|
||||
continue
|
||||
|
||||
pylint_args = [str(file)]
|
||||
if (repo_dir / ".pylintrc").exists():
|
||||
pylint_args += [f"--rcfile={str(repo_dir / '.pylintrc')}"]
|
||||
|
||||
reporter = CapturedJsonReporter()
|
||||
|
||||
logging.debug("Running pylint on %s", file)
|
||||
|
||||
lint.Run(pylint_args, reporter=reporter, exit=False)
|
||||
pylint_stderr = ""
|
||||
pylint_stdout = reporter.get_result()
|
||||
|
||||
if pylint_stderr:
|
||||
self.output_file_data.append(
|
||||
f"PyLint error ({repo['name']}): '{pylint_stderr}'"
|
||||
)
|
||||
return [ERROR_OUTPUT_HANDLER]
|
||||
|
||||
try:
|
||||
pylint_result = json.loads(pylint_stdout)
|
||||
except json.JSONDecodeError as json_err:
|
||||
self.output_file_data.append(
|
||||
f"PyLint output JSONDecodeError: {json_err.msg}"
|
||||
)
|
||||
return [ERROR_OUTPUT_HANDLER]
|
||||
|
||||
if pylint_result:
|
||||
return [ERROR_PYLINT_FAILED_LINTING]
|
||||
|
||||
if self.keep_repos:
|
||||
with open(repo_dir / ".pylint-ok", "w") as pylint_ok:
|
||||
pylint_ok.write("".join(pylint_result))
|
||||
arg_dict = {"branch": lib_repo.default_branch}
|
||||
|
||||
try:
|
||||
workflow = lib_repo.get_workflow("build.yml")
|
||||
workflow_runs = workflow.get_runs(**arg_dict)
|
||||
except pygithub.GithubException: # This can probably be tightened later
|
||||
# No workflows or runs yet
|
||||
return []
|
||||
try:
|
||||
if workflow_runs[0].conclusion != "success":
|
||||
return [ERROR_CI_BUILD]
|
||||
except IndexError:
|
||||
# The CI hasn't run yet, so empty list of workflow runs returned
|
||||
# This doesn't indicate a failure, so skip it
|
||||
pass
|
||||
return []
|
||||
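The rewritten validate_actions_state leans on PyGithub's workflow API instead of raw REST calls; roughly, the decision boils down to the sketch below (GITHUB_TOKEN and the repository name are placeholders):

import os

import github as pygithub  # PyGithub

def latest_build_run_passed(full_name):
    """True unless the newest build.yml run on the default branch failed."""
    gh = pygithub.Github(os.environ["GITHUB_TOKEN"])
    lib_repo = gh.get_repo(full_name)
    if lib_repo.archived:
        return True  # archived repos are skipped, as above
    try:
        runs = lib_repo.get_workflow("build.yml").get_runs(branch=lib_repo.default_branch)
        return runs[0].conclusion == "success"
    except pygithub.GithubException:
        return True  # no workflow yet: not treated as a failure
    except IndexError:
        return True  # workflow exists but has never run

# latest_build_run_passed("adafruit/Adafruit_CircuitPython_LIS3DH")  # placeholder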
|
||||
def validate_default_branch(self, repo):
|
||||
|
|
|
|||
|
|
@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
|
||||
#
|
||||
# Copyright (c) 2017 Scott Shawcroft for Adafruit Industries
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# GitHub API Search has stopped returning the core repo for some reason. Tried several
|
||||
# different search params, and came up empty. Hardcoding it as a failsafe.
|
||||
|
|
@ -30,7 +12,7 @@ import datetime
|
|||
import os
|
||||
import re
|
||||
import requests
|
||||
from adabot import github_requests as github
|
||||
from adabot import github_requests as gh_reqs
|
||||
from adabot import pypi_requests as pypi
|
||||
|
||||
CORE_REPO_URL = "/repos/adafruit/circuitpython"
|
||||
|
|
@ -55,7 +37,7 @@ def parse_gitmodules(input_text):
|
|||
values in double quotes are completely lost. A very basic regular
|
||||
expression-based parsing logic is used here to parse the data. This parsing
|
||||
is far from perfect and does not handle escaping quotes, line continuations
|
||||
(when a line ends in '\;'), etc. Unfortunately the git config format is
|
||||
(when a line ends in '\\;'), etc. Unfortunately the git config format is
|
||||
surprisingly complex and no mature parsing modules are available (outside
|
||||
the code in git itself).
|
||||
"""
|
||||
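The implementation behind this docstring is not shown in the diff; as a rough illustration of the regex-per-line approach it describes, here is an assumed parser (not adabot's exact expressions) over a tiny .gitmodules sample:

import re

GITMODULES_SAMPLE = """
[submodule "libraries/lis3dh"]
\tpath = libraries/lis3dh
\turl = https://github.com/adafruit/Adafruit_CircuitPython_LIS3DH.git
"""

def parse_gitmodules(input_text):
    """Very small regex-based parse: list of (submodule name, {key: value})."""
    results, section_name, variables = [], None, {}
    for line in input_text.splitlines():
        section = re.match(r'\[submodule "(.+)"\]', line.strip())
        if section:
            if section_name is not None:
                results.append((section_name, variables))
            section_name, variables = section.group(1), {}
            continue
        assignment = re.match(r"(\w+)\s*=\s*(.+)", line.strip())
        if assignment and section_name is not None:
            variables[assignment.group(1)] = assignment.group(2)
    if section_name is not None:
        results.append((section_name, variables))
    return results

print(parse_gitmodules(GITMODULES_SAMPLE))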
|
|
@ -183,7 +165,7 @@ def list_repos(*, include_repos=None):
|
|||
are included.
|
||||
"""
|
||||
repos = []
|
||||
result = github.get(
|
||||
result = gh_reqs.get(
|
||||
"/search/repositories",
|
||||
params={
|
||||
"q": "Adafruit_CircuitPython user:adafruit archived:false fork:true",
|
||||
|
|
@ -208,21 +190,21 @@ def list_repos(*, include_repos=None):
|
|||
)
|
||||
|
||||
if result.links.get("next"):
|
||||
result = github.get(result.links["next"]["url"])
|
||||
result = gh_reqs.get(result.links["next"]["url"])
|
||||
else:
|
||||
break
|
||||
|
||||
repo_names = [repo["name"] for repo in repos]
|
||||
|
||||
if "circuitpython" not in repo_names:
|
||||
core = github.get(CORE_REPO_URL)
|
||||
core = gh_reqs.get(CORE_REPO_URL)
|
||||
if core.ok:
|
||||
repos.append(core.json())
|
||||
|
||||
if include_repos:
|
||||
for repo in include_repos:
|
||||
if repo not in repo_names:
|
||||
add_repo = github.get("/repos/adafruit/" + repo)
|
||||
add_repo = gh_reqs.get("/repos/adafruit/" + repo)
|
||||
if add_repo.ok:
|
||||
repos.append(add_repo.json())
|
||||
else:
|
||||
|
|
@ -265,7 +247,7 @@ def is_new_or_updated(repo):
|
|||
today_minus_seven = datetime.datetime.today() - datetime.timedelta(days=7)
|
||||
|
||||
# first, check the latest release to see if within the last 7 days
|
||||
result = github.get("/repos/adafruit/" + repo["name"] + "/releases/latest")
|
||||
result = gh_reqs.get("/repos/adafruit/" + repo["name"] + "/releases/latest")
|
||||
if not result.ok:
|
||||
return None
|
||||
release_info = result.json()
|
||||
|
|
@ -280,7 +262,7 @@ def is_new_or_updated(repo):
|
|||
|
||||
# we have a release within the last 7 days. now check if its a newly
|
||||
# released library within the last week, or if its just an update
|
||||
result = github.get("/repos/adafruit/" + repo["name"] + "/releases")
|
||||
result = gh_reqs.get("/repos/adafruit/" + repo["name"] + "/releases")
|
||||
if not result.ok:
|
||||
return None
|
||||
|
||||
|
|
@ -309,7 +291,7 @@ def whois_github_user():
|
|||
if "GITHUB_ACTOR" in os.environ:
|
||||
user = os.environ["GITHUB_ACTOR"]
|
||||
else:
|
||||
user = github.get("/user").json()["login"]
|
||||
user = gh_reqs.get("/user").json()["login"]
|
||||
|
||||
return user
|
||||
|
||||
|
|
|
|||
|
|
@ -1,24 +1,7 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
|
||||
#
|
||||
# Copyright (c) 2017 Scott Shawcroft for Adafruit Industries
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
""" Helper for requests to pypi.org
|
||||
|
||||
* Author(s): Michael McWethy
|
||||
|
|
|
|||
|
|
@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2019 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2019 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Adabot utility for updating circuitpython.org libraries info."""
|
||||
|
||||
|
|
@ -34,12 +16,12 @@ import sys
|
|||
|
||||
from adabot.lib import common_funcs
|
||||
from adabot.lib import circuitpython_library_validators as cpy_vals
|
||||
from adabot import github_requests as github
|
||||
from adabot import github_requests as gh_reqs
|
||||
from adabot import pypi_requests as pypi
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
ch = logging.StreamHandler(stream=sys.stdout)
|
||||
logging.basicConfig(format="%(message)s", handlers=[ch])
|
||||
logging.basicConfig(level=logging.DEBUG, format="%(message)s", handlers=[ch])
|
||||
|
||||
|
||||
DO_NOT_VALIDATE = [
|
||||
|
|
@ -83,7 +65,7 @@ def get_open_issues_and_prs(repo):
|
|||
open_issues = []
|
||||
open_pull_requests = []
|
||||
params = {"state": "open"}
|
||||
result = github.get("/repos/adafruit/" + repo["name"] + "/issues", params=params)
|
||||
result = gh_reqs.get("/repos/adafruit/" + repo["name"] + "/issues", params=params)
|
||||
if not result.ok:
|
||||
return [], []
|
||||
|
||||
|
|
@ -119,7 +101,7 @@ def get_contributors(repo):
|
|||
reviewers = []
|
||||
merged_pr_count = 0
|
||||
params = {"state": "closed", "sort": "updated", "direction": "desc"}
|
||||
result = github.get("/repos/adafruit/" + repo["name"] + "/pulls", params=params)
|
||||
result = gh_reqs.get("/repos/adafruit/" + repo["name"] + "/pulls", params=params)
|
||||
if result.ok:
|
||||
today_minus_seven = datetime.datetime.today() - datetime.timedelta(days=7)
|
||||
pull_requests = result.json()
|
||||
|
|
@ -139,12 +121,12 @@ def get_contributors(repo):
|
|||
merged_pr_count += 1
|
||||
|
||||
# get reviewers (merged_by, and any others)
|
||||
single_pr = github.get(pull_request["url"])
|
||||
single_pr = gh_reqs.get(pull_request["url"])
|
||||
if not single_pr.ok:
|
||||
continue
|
||||
pr_info = single_pr.json()
|
||||
reviewers.append(pr_info["merged_by"]["login"])
|
||||
pr_reviews = github.get(str(pr_info["url"]) + "/reviews")
|
||||
pr_reviews = gh_reqs.get(str(pr_info["url"]) + "/reviews")
|
||||
if not pr_reviews.ok:
|
||||
continue
|
||||
for review in pr_reviews.json():
|
||||
|
|
@ -171,12 +153,12 @@ def main(
|
|||
logger.info("Run Date: %s", run_time.strftime("%d %B %Y, %I:%M%p"))
|
||||
|
||||
if output_file:
|
||||
logger.info(" - Report output will be saved to: %s", output_file)
|
||||
file_handler = logging.FileHandler(output_file)
|
||||
logger.addHandler(file_handler)
|
||||
logger.info(" - Report output will be saved to: %s", output_file)
|
||||
|
||||
if cache_http:
|
||||
cpy_vals.github.setup_cache(cache_ttl)
|
||||
cpy_vals.gh_reqs.setup_cache(cache_ttl)
|
||||
|
||||
repos = common_funcs.list_repos(
|
||||
include_repos=(
|
||||
|
|
|
|||
|
|
@ -0,0 +1,25 @@
|
|||
From 70552ea19bdb5a06550d429c8e28b54770ce7548 Mon Sep 17 00:00:00 2001
|
||||
From: dherrada <dylan.herrada@adafruit.com>
|
||||
Date: Fri, 5 Nov 2021 14:49:30 -0400
|
||||
Subject: [PATCH] Disabled unspecified-encoding pylint check
|
||||
|
||||
---
|
||||
.pylintrc | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/.pylintrc b/.pylintrc
|
||||
index e78bad2..cfd1c41 100644
|
||||
--- a/.pylintrc
|
||||
+++ b/.pylintrc
|
||||
@@ -55,7 +55,7 @@ confidence=
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
# disable=import-error,print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call
|
||||
-disable=print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call,import-error,bad-continuation
|
||||
+disable=print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call,import-error,bad-continuation,unspecified-encoding
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
--
|
||||
2.25.1
|
||||
|
||||
72
patches/0001-First-gitignore-patch.patch
Normal file
72
patches/0001-First-gitignore-patch.patch
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
From 2a8657e9851b5916cf1733d05b9c47f818570832 Mon Sep 17 00:00:00 2001
|
||||
From: evaherrada <eva.herrada@adafruit.com>
|
||||
Date: Thu, 21 Apr 2022 15:00:27 -0400
|
||||
Subject: [PATCH] Updated gitignore
|
||||
|
||||
---
|
||||
.gitignore | 48 ++++++++++++++++++++++++++++++++++++++++--------
|
||||
1 file changed, 40 insertions(+), 8 deletions(-)
|
||||
|
||||
diff --git a/.gitignore b/.gitignore
|
||||
index 9647e71..544ec4a 100644
|
||||
--- a/.gitignore
|
||||
+++ b/.gitignore
|
||||
@@ -1,15 +1,47 @@
|
||||
-# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
|
||||
+# SPDX-FileCopyrightText: 2022 Kattni Rembor, written for Adafruit Industries
|
||||
#
|
||||
-# SPDX-License-Identifier: Unlicense
|
||||
+# SPDX-License-Identifier: MIT
|
||||
|
||||
+# Do not include files and directories created by your personal work environment, such as the IDE
|
||||
+# you use, except for those already listed here. Pull requests including changes to this file will
|
||||
+# not be accepted.
|
||||
+
|
||||
+# This .gitignore file contains rules for files generated by working with CircuitPython libraries,
|
||||
+# including building Sphinx, testing with pip, and creating a virtual environment, as well as the
|
||||
+# MacOS and IDE-specific files generated by using MacOS in general, or the PyCharm or VSCode IDEs.
|
||||
+
|
||||
+# If you find that there are files being generated on your machine that should not be included in
|
||||
+# your git commit, you should create a .gitignore_global file on your computer to include the
|
||||
+# files created by your personal setup. To do so, follow the two steps below.
|
||||
+
|
||||
+# First, create a file called .gitignore_global somewhere convenient for you, and add rules for
|
||||
+# the files you want to exclude from git commits.
|
||||
+
|
||||
+# Second, configure Git to use the exclude file for all Git repositories by running the
|
||||
+# following via commandline, replacing "path/to/your/" with the actual path to your newly created
|
||||
+# .gitignore_global file:
|
||||
+# git config --global core.excludesfile path/to/your/.gitignore_global
|
||||
+
|
||||
+# CircuitPython-specific files
|
||||
*.mpy
|
||||
-.idea
|
||||
+
|
||||
+# Python-specific files
|
||||
__pycache__
|
||||
-_build
|
||||
*.pyc
|
||||
+
|
||||
+# Sphinx build-specific files
|
||||
+_build
|
||||
+
|
||||
+# This file results from running `pip -e install .` in a local repository
|
||||
+*.egg-info
|
||||
+
|
||||
+# Virtual environment-specific files
|
||||
.env
|
||||
-bundles
|
||||
+
|
||||
+# MacOS-specific files
|
||||
*.DS_Store
|
||||
-.eggs
|
||||
-dist
|
||||
-**/*.egg-info
|
||||
+
|
||||
+# IDE-specific files
|
||||
+.idea
|
||||
+.vscode
|
||||
+*~
|
||||
--
|
||||
2.25.1
|
||||
|
||||
86
patches/0001-First-part-of-patch.patch
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
From efe38badfd18297988c9c67be8ac1108d150a4ca Mon Sep 17 00:00:00 2001
|
||||
From: dherrada <dylan.herrada@adafruit.com>
|
||||
Date: Thu, 13 Jan 2022 16:27:30 -0500
|
||||
Subject: [PATCH] First part of patch
|
||||
|
||||
---
|
||||
.../PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md | 2 +-
|
||||
.github/workflows/build.yml | 6 +++---
|
||||
.github/workflows/release.yml | 8 ++++----
|
||||
.readthedocs.yaml | 2 +-
|
||||
4 files changed, 9 insertions(+), 9 deletions(-)
|
||||
|
||||
diff --git a/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md b/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md
|
||||
index 71ef8f8..8de294e 100644
|
||||
--- a/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md
|
||||
+++ b/.github/PULL_REQUEST_TEMPLATE/adafruit_circuitpython_pr.md
|
||||
@@ -4,7 +4,7 @@
|
||||
|
||||
Thank you for contributing! Before you submit a pull request, please read the following.
|
||||
|
||||
-Make sure any changes you're submitting are in line with the CircuitPython Design Guide, available here: https://circuitpython.readthedocs.io/en/latest/docs/design_guide.html
|
||||
+Make sure any changes you're submitting are in line with the CircuitPython Design Guide, available here: https://docs.circuitpython.org/en/latest/docs/design_guide.html
|
||||
|
||||
If your changes are to documentation, please verify that the documentation builds locally by following the steps found here: https://adafru.it/build-docs
|
||||
|
||||
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
|
||||
index ca35544..474520d 100644
|
||||
--- a/.github/workflows/build.yml
|
||||
+++ b/.github/workflows/build.yml
|
||||
@@ -22,10 +22,10 @@ jobs:
|
||||
awk -F '\/' '{ print tolower($2) }' |
|
||||
tr '_' '-'
|
||||
)
|
||||
- - name: Set up Python 3.7
|
||||
- uses: actions/setup-python@v1
|
||||
+ - name: Set up Python 3.x
|
||||
+ uses: actions/setup-python@v2
|
||||
with:
|
||||
- python-version: 3.7
|
||||
+ python-version: "3.x"
|
||||
- name: Versions
|
||||
run: |
|
||||
python3 --version
|
||||
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
|
||||
index 6d0015a..a65e5de 100644
|
||||
--- a/.github/workflows/release.yml
|
||||
+++ b/.github/workflows/release.yml
|
||||
@@ -24,10 +24,10 @@ jobs:
|
||||
awk -F '\/' '{ print tolower($2) }' |
|
||||
tr '_' '-'
|
||||
)
|
||||
- - name: Set up Python 3.6
|
||||
- uses: actions/setup-python@v1
|
||||
+ - name: Set up Python 3.x
|
||||
+ uses: actions/setup-python@v2
|
||||
with:
|
||||
- python-version: 3.6
|
||||
+ python-version: "3.x"
|
||||
- name: Versions
|
||||
run: |
|
||||
python3 --version
|
||||
@@ -67,7 +67,7 @@ jobs:
|
||||
echo ::set-output name=setup-py::$( find . -wholename './setup.py' )
|
||||
- name: Set up Python
|
||||
if: contains(steps.need-pypi.outputs.setup-py, 'setup.py')
|
||||
- uses: actions/setup-python@v1
|
||||
+ uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: '3.x'
|
||||
- name: Install dependencies
|
||||
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
|
||||
index 1335112..f8b2891 100644
|
||||
--- a/.readthedocs.yaml
|
||||
+++ b/.readthedocs.yaml
|
||||
@@ -9,7 +9,7 @@
|
||||
version: 2
|
||||
|
||||
python:
|
||||
- version: "3.7"
|
||||
+ version: "3.x"
|
||||
install:
|
||||
- requirements: docs/requirements.txt
|
||||
- requirements: requirements.txt
|
||||
--
|
||||
2.25.1
|
||||
|
||||
30
patches/0001-Fixed-readthedocs-build.patch
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
From 609b9a5243fbbc6e0f6014d4aeaa902a1d2ecfc9 Mon Sep 17 00:00:00 2001
|
||||
From: dherrada <dylan.herrada@adafruit.com>
|
||||
Date: Mon, 14 Feb 2022 15:35:02 -0500
|
||||
Subject: [PATCH] Fixed readthedocs build
|
||||
|
||||
---
|
||||
.readthedocs.yaml | 6 +++++-
|
||||
1 file changed, 5 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
|
||||
index f8b2891..33c2a61 100644
|
||||
--- a/.readthedocs.yaml
|
||||
+++ b/.readthedocs.yaml
|
||||
@@ -8,8 +8,12 @@
|
||||
# Required
|
||||
version: 2
|
||||
|
||||
+build:
|
||||
+ os: ubuntu-20.04
|
||||
+ tools:
|
||||
+ python: "3"
|
||||
+
|
||||
python:
|
||||
- version: "3.x"
|
||||
install:
|
||||
- requirements: docs/requirements.txt
|
||||
- requirements: requirements.txt
|
||||
--
|
||||
2.25.1
|
||||
|
||||
105
patches/0001-Pylint,-pre-commit,-readthedocs-patch.patch
Normal file
|
|
@ -0,0 +1,105 @@
|
|||
From 45a494ad6b93623b42ac6c96ff261be52ea37537 Mon Sep 17 00:00:00 2001
|
||||
From: dherrada <dylan.herrada@adafruit.com>
|
||||
Date: Wed, 3 Nov 2021 14:40:16 -0400
|
||||
Subject: [PATCH] Pylint and readthedocs patch test
|
||||
|
||||
---
|
||||
.github/workflows/build.yml | 4 ++--
|
||||
.pre-commit-config.yaml | 26 +++++++++++++++++---------
|
||||
.pylintrc | 2 +-
|
||||
.readthedocs.yml | 2 +-
|
||||
docs/requirements.txt | 5 +++++
|
||||
5 files changed, 26 insertions(+), 13 deletions(-)
|
||||
create mode 100644 docs/requirements.txt
|
||||
|
||||
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
|
||||
index c4c975d..ca35544 100644
|
||||
--- a/.github/workflows/build.yml
|
||||
+++ b/.github/workflows/build.yml
|
||||
@@ -42,9 +42,9 @@ jobs:
|
||||
# (e.g. - apt-get: gettext, etc; pip: circuitpython-build-tools, requirements.txt; etc.)
|
||||
run: |
|
||||
source actions-ci/install.sh
|
||||
- - name: Pip install pylint, Sphinx, pre-commit
|
||||
+ - name: Pip install Sphinx, pre-commit
|
||||
run: |
|
||||
- pip install --force-reinstall pylint Sphinx sphinx-rtd-theme pre-commit
|
||||
+ pip install --force-reinstall Sphinx sphinx-rtd-theme pre-commit
|
||||
- name: Library version
|
||||
run: git describe --dirty --always --tags
|
||||
- name: Pre-commit hooks
|
||||
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
|
||||
index 8810708..1b9fadc 100644
|
||||
--- a/.pre-commit-config.yaml
|
||||
+++ b/.pre-commit-config.yaml
|
||||
@@ -18,17 +18,25 @@ repos:
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
- repo: https://github.com/pycqa/pylint
|
||||
- rev: pylint-2.7.1
|
||||
+ rev: v2.11.1
|
||||
hooks:
|
||||
- id: pylint
|
||||
name: pylint (library code)
|
||||
types: [python]
|
||||
- exclude: "^(docs/|examples/|setup.py$)"
|
||||
-- repo: local
|
||||
- hooks:
|
||||
- - id: pylint_examples
|
||||
- name: pylint (examples code)
|
||||
+ args:
|
||||
+ - --disable=consider-using-f-string
|
||||
+ exclude: "^(docs/|examples/|tests/|setup.py$)"
|
||||
+ - id: pylint
|
||||
+ name: pylint (example code)
|
||||
description: Run pylint rules on "examples/*.py" files
|
||||
- entry: /usr/bin/env bash -c
|
||||
- args: ['([[ ! -d "examples" ]] || for example in $(find . -path "./examples/*.py"); do pylint --disable=missing-docstring,invalid-name,consider-using-f-string $example; done)']
|
||||
- language: system
|
||||
+ types: [python]
|
||||
+ files: "^examples/"
|
||||
+ args:
|
||||
+ - --disable=missing-docstring,invalid-name,consider-using-f-string,duplicate-code
|
||||
+ - id: pylint
|
||||
+ name: pylint (test code)
|
||||
+ description: Run pylint rules on "tests/*.py" files
|
||||
+ types: [python]
|
||||
+ files: "^tests/"
|
||||
+ args:
|
||||
+ - --disable=missing-docstring,consider-using-f-string,duplicate-code
|
||||
diff --git a/.pylintrc b/.pylintrc
|
||||
index aed1e4c..12a028e 100644
|
||||
--- a/.pylintrc
|
||||
+++ b/.pylintrc
|
||||
@@ -252,7 +252,7 @@ ignore-docstrings=yes
|
||||
ignore-imports=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
-min-similarity-lines=12
|
||||
+min-similarity-lines=4
|
||||
|
||||
|
||||
[BASIC]
|
||||
diff --git a/.readthedocs.yml b/.readthedocs.yml
|
||||
index ffa84c4..49dcab3 100644
|
||||
--- a/.readthedocs.yml
|
||||
+++ b/.readthedocs.yml
|
||||
@@ -4,4 +4,4 @@
|
||||
|
||||
python:
|
||||
version: 3
|
||||
-requirements_file: requirements.txt
|
||||
+requirements_file: docs/requirements.txt
|
||||
diff --git a/docs/requirements.txt b/docs/requirements.txt
|
||||
new file mode 100644
|
||||
index 0000000..88e6733
|
||||
--- /dev/null
|
||||
+++ b/docs/requirements.txt
|
||||
@@ -0,0 +1,5 @@
|
||||
+# SPDX-FileCopyrightText: 2021 Kattni Rembor for Adafruit Industries
|
||||
+#
|
||||
+# SPDX-License-Identifier: Unlicense
|
||||
+
|
||||
+sphinx>=4.0.0
|
||||
--
|
||||
2.25.1
|
||||
|
|
@@ -0,0 +1,25 @@
|
|||
From 938c68803029b40ba783141373957614350bba67 Mon Sep 17 00:00:00 2001
|
||||
From: evaherrada <eva.herrada@adafruit.com>
|
||||
Date: Tue, 21 Jun 2022 17:00:37 -0400
|
||||
Subject: [PATCH] Removed duplicate-code from library pylint disable
|
||||
|
||||
---
|
||||
.pre-commit-config.yaml | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
|
||||
index 0a91a11..3343606 100644
|
||||
--- a/.pre-commit-config.yaml
|
||||
+++ b/.pre-commit-config.yaml
|
||||
@@ -24,7 +24,7 @@ repos:
|
||||
name: pylint (library code)
|
||||
types: [python]
|
||||
args:
|
||||
- - --disable=consider-using-f-string,duplicate-code
|
||||
+ - --disable=consider-using-f-string
|
||||
exclude: "^(docs/|examples/|tests/|setup.py$)"
|
||||
- id: pylint
|
||||
name: pylint (example code)
|
||||
--
|
||||
2.25.1
|
||||
|
||||
74
patches/0001-Second-gitignore-patch.patch
Normal file
|
|
@@ -0,0 +1,74 @@
|
|||
From 3e8355111337836c52f6ef121641712d4ca9f6d2 Mon Sep 17 00:00:00 2001
|
||||
From: evaherrada <eva.herrada@adafruit.com>
|
||||
Date: Thu, 21 Apr 2022 15:45:16 -0400
|
||||
Subject: [PATCH] Updated gitignore
|
||||
|
||||
---
|
||||
.gitignore | 49 +++++++++++++++++++++++++++++++++++++++----------
|
||||
1 file changed, 39 insertions(+), 10 deletions(-)
|
||||
|
||||
diff --git a/.gitignore b/.gitignore
|
||||
index 2c6ddfd..544ec4a 100644
|
||||
--- a/.gitignore
|
||||
+++ b/.gitignore
|
||||
@@ -1,18 +1,47 @@
|
||||
-# SPDX-FileCopyrightText: 2017 Scott Shawcroft, written for Adafruit Industries
|
||||
+# SPDX-FileCopyrightText: 2022 Kattni Rembor, written for Adafruit Industries
|
||||
#
|
||||
-# SPDX-License-Identifier: Unlicense
|
||||
+# SPDX-License-Identifier: MIT
|
||||
|
||||
+# Do not include files and directories created by your personal work environment, such as the IDE
|
||||
+# you use, except for those already listed here. Pull requests including changes to this file will
|
||||
+# not be accepted.
|
||||
+
|
||||
+# This .gitignore file contains rules for files generated by working with CircuitPython libraries,
|
||||
+# including building Sphinx, testing with pip, and creating a virtual environment, as well as the
|
||||
+# MacOS and IDE-specific files generated by using MacOS in general, or the PyCharm or VSCode IDEs.
|
||||
+
|
||||
+# If you find that there are files being generated on your machine that should not be included in
|
||||
+# your git commit, you should create a .gitignore_global file on your computer to include the
|
||||
+# files created by your personal setup. To do so, follow the two steps below.
|
||||
+
|
||||
+# First, create a file called .gitignore_global somewhere convenient for you, and add rules for
|
||||
+# the files you want to exclude from git commits.
|
||||
+
|
||||
+# Second, configure Git to use the exclude file for all Git repositories by running the
|
||||
+# following via commandline, replacing "path/to/your/" with the actual path to your newly created
|
||||
+# .gitignore_global file:
|
||||
+# git config --global core.excludesfile path/to/your/.gitignore_global
|
||||
+
|
||||
+# CircuitPython-specific files
|
||||
*.mpy
|
||||
-.idea
|
||||
+
|
||||
+# Python-specific files
|
||||
__pycache__
|
||||
-_build
|
||||
*.pyc
|
||||
+
|
||||
+# Sphinx build-specific files
|
||||
+_build
|
||||
+
|
||||
+# This file results from running `pip install -e .` in a local repository
|
||||
+*.egg-info
|
||||
+
|
||||
+# Virtual environment-specific files
|
||||
.env
|
||||
-.python-version
|
||||
-build*/
|
||||
-bundles
|
||||
+
|
||||
+# MacOS-specific files
|
||||
*.DS_Store
|
||||
-.eggs
|
||||
-dist
|
||||
-**/*.egg-info
|
||||
+
|
||||
+# IDE-specific files
|
||||
+.idea
|
||||
.vscode
|
||||
+*~
|
||||
--
|
||||
2.25.1
|
||||
|
||||
25
patches/0001-Update-Black-to-latest.patch
Normal file
|
|
@@ -0,0 +1,25 @@
|
|||
From 94efa5385a95c41e2564e9f0a2760728d689eead Mon Sep 17 00:00:00 2001
|
||||
From: Kattni Rembor <kattni@adafruit.com>
|
||||
Date: Mon, 28 Mar 2022 15:52:04 -0400
|
||||
Subject: [PATCH] Update Black to latest.
|
||||
|
||||
---
|
||||
.pre-commit-config.yaml | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
|
||||
index 1b9fadc..7467c1d 100644
|
||||
--- a/.pre-commit-config.yaml
|
||||
+++ b/.pre-commit-config.yaml
|
||||
@@ -4,7 +4,7 @@
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/python/black
|
||||
- rev: 20.8b1
|
||||
+ rev: 22.3.0
|
||||
hooks:
|
||||
- id: black
|
||||
- repo: https://github.com/fsfe/reuse-tool
|
||||
--
|
||||
2.33.1
|
||||
|
||||
49
patches/0001-Updated-readthedocs-file.patch
Normal file
|
|
@@ -0,0 +1,49 @@
|
|||
From df685288b18965c4089a8895b0eb3bf80c17423e Mon Sep 17 00:00:00 2001
|
||||
From: dherrada <dylan.herrada@adafruit.com>
|
||||
Date: Tue, 9 Nov 2021 13:31:14 -0500
|
||||
Subject: [PATCH] Updated readthedocs file
|
||||
|
||||
---
|
||||
.readthedocs.yaml | 15 +++++++++++++++
|
||||
.readthedocs.yml | 7 -------
|
||||
2 files changed, 15 insertions(+), 7 deletions(-)
|
||||
create mode 100644 .readthedocs.yaml
|
||||
delete mode 100644 .readthedocs.yml
|
||||
|
||||
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
|
||||
new file mode 100644
|
||||
index 0000000..95ec218
|
||||
--- /dev/null
|
||||
+++ b/.readthedocs.yaml
|
||||
@@ -0,0 +1,15 @@
|
||||
+# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
|
||||
+#
|
||||
+# SPDX-License-Identifier: Unlicense
|
||||
+
|
||||
+# Read the Docs configuration file
|
||||
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||
+
|
||||
+# Required
|
||||
+version: 2
|
||||
+
|
||||
+python:
|
||||
+ version: "3.6"
|
||||
+ install:
|
||||
+ - requirements: docs/requirements.txt
|
||||
+ - requirements: requirements.txt
|
||||
diff --git a/.readthedocs.yml b/.readthedocs.yml
|
||||
deleted file mode 100644
|
||||
index 49dcab3..0000000
|
||||
--- a/.readthedocs.yml
|
||||
+++ /dev/null
|
||||
@@ -1,7 +0,0 @@
|
||||
-# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
|
||||
-#
|
||||
-# SPDX-License-Identifier: Unlicense
|
||||
-
|
||||
-python:
|
||||
- version: 3
|
||||
-requirements_file: docs/requirements.txt
|
||||
--
|
||||
2.25.1
|
||||
|
||||
21
patches/01192023_release_patch.patch
Normal file
|
|
@@ -0,0 +1,21 @@
|
|||
From 31490c26ab114fde7462765cbfa3f67afce33bc0 Mon Sep 17 00:00:00 2001
|
||||
From: Alec Delaney <89490472+tekktrik@users.noreply.github.com>
|
||||
Date: Thu, 19 Jan 2023 23:39:55 -0500
|
||||
Subject: [PATCH] Add upload url to release action
|
||||
|
||||
---
|
||||
.github/workflows/release_gh.yml | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
diff --git a/.github/workflows/release_gh.yml b/.github/workflows/release_gh.yml
|
||||
index b8aa8d6..9acec60 100644
|
||||
--- a/.github/workflows/release_gh.yml
|
||||
+++ b/.github/workflows/release_gh.yml
|
||||
@@ -16,3 +16,4 @@ jobs:
|
||||
uses: adafruit/workflows-circuitpython-libs/release-gh@main
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
+ upload-url: ${{ github.event.release.upload_url }}
|
||||
--
|
||||
2.39.0
|
||||
|
||||
24
patches/01SEP2022_venv_gitignore.patch
Normal file
|
|
@@ -0,0 +1,24 @@
|
|||
From 72726ff54b3b5782181f6ab2057ce84258a94277 Mon Sep 17 00:00:00 2001
|
||||
From: Alec Delaney <89490472+tekktrik@users.noreply.github.com>
|
||||
Date: Thu, 1 Sep 2022 20:16:31 -0400
|
||||
Subject: [PATCH] Add .venv to .gitignore
|
||||
|
||||
---
|
||||
.gitignore | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
diff --git a/.gitignore b/.gitignore
|
||||
index 544ec4a..db3d538 100644
|
||||
--- a/.gitignore
|
||||
+++ b/.gitignore
|
||||
@@ -37,6 +37,7 @@ _build
|
||||
|
||||
# Virtual environment-specific files
|
||||
.env
|
||||
+.venv
|
||||
|
||||
# MacOS-specific files
|
||||
*.DS_Store
|
||||
--
|
||||
2.37.2
|
||||
|
||||
25
patches/05302022_set_doc_language.patch
Normal file
|
|
@@ -0,0 +1,25 @@
|
|||
From c87dc6f80fc8eac93b266103aef8dc9683301b01 Mon Sep 17 00:00:00 2001
|
||||
From: Alec Delaney <tekktrik@gmail.com>
|
||||
Date: Mon, 30 May 2022 14:25:04 -0400
|
||||
Subject: [PATCH] Set language to "en" for documentation
|
||||
|
||||
---
|
||||
docs/conf.py | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/docs/conf.py b/docs/conf.py
|
||||
index cb5dde7..c58fe52 100644
|
||||
--- a/docs/conf.py
|
||||
+++ b/docs/conf.py
|
||||
@@ -57,7 +57,7 @@ release = "1.0"
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
-language = None
|
||||
+language = "en"
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
--
|
||||
2.36.1
|
||||
|
||||
24
patches/09AUG2022-setuptools-scm.patch
Normal file
|
|
@@ -0,0 +1,24 @@
|
|||
From a445ef4116a4f863fe532cdc3295f73876db400f Mon Sep 17 00:00:00 2001
|
||||
From: Alec Delaney <tekktrik@gmail.com>
|
||||
Date: Tue, 9 Aug 2022 12:03:54 -0400
|
||||
Subject: [PATCH] Add setuptools-scm to build system requirements
|
||||
|
||||
---
|
||||
pyproject.toml | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
diff --git a/pyproject.toml b/pyproject.toml
|
||||
index 822021e..0c8d672 100644
|
||||
--- a/pyproject.toml
|
||||
+++ b/pyproject.toml
|
||||
@@ -6,6 +6,7 @@
|
||||
requires = [
|
||||
"setuptools",
|
||||
"wheel",
|
||||
+ "setuptools-scm",
|
||||
]
|
||||
|
||||
[project]
|
||||
--
|
||||
2.35.1.windows.2
|
||||
|
||||
42
patches/09MAY2023_precommit_update.patch
Normal file
|
|
@@ -0,0 +1,42 @@
|
|||
From a70dfa8cd8a37dbd5674a4efd0ace8597dc44be5 Mon Sep 17 00:00:00 2001
|
||||
From: Tekktrik <tekktrik@gmail.com>
|
||||
Date: Tue, 9 May 2023 20:26:25 -0400
|
||||
Subject: [PATCH] Update pre-commit hooks
|
||||
|
||||
---
|
||||
.pre-commit-config.yaml | 8 ++++----
|
||||
1 file changed, 4 insertions(+), 4 deletions(-)
|
||||
|
||||
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
|
||||
index 0e5fccc..70ade69 100644
|
||||
--- a/.pre-commit-config.yaml
|
||||
+++ b/.pre-commit-config.yaml
|
||||
@@ -4,21 +4,21 @@
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/python/black
|
||||
- rev: 22.3.0
|
||||
+ rev: 23.3.0
|
||||
hooks:
|
||||
- id: black
|
||||
- repo: https://github.com/fsfe/reuse-tool
|
||||
- rev: v0.14.0
|
||||
+ rev: v1.1.2
|
||||
hooks:
|
||||
- id: reuse
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
- rev: v4.2.0
|
||||
+ rev: v4.4.0
|
||||
hooks:
|
||||
- id: check-yaml
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
- repo: https://github.com/pycqa/pylint
|
||||
- rev: v2.15.5
|
||||
+ rev: v2.17.4
|
||||
hooks:
|
||||
- id: pylint
|
||||
name: pylint (library code)
|
||||
--
|
||||
2.40.0
|
||||
|
||||
33
patches/11SEP2023_fix_rtd_theme.patch
Normal file
|
|
@@ -0,0 +1,33 @@
|
|||
Subject: [PATCH] fix rtd theme
|
||||
---
|
||||
Index: docs/conf.py
|
||||
IDEA additional info:
|
||||
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
|
||||
<+>UTF-8
|
||||
===================================================================
|
||||
diff --git a/docs/conf.py b/docs/conf.py
|
||||
--- a/docs/conf.py
|
||||
+++ b/docs/conf.py
|
||||
@@ -101,19 +101,10 @@
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
#
|
||||
-on_rtd = os.environ.get("READTHEDOCS", None) == "True"
|
||||
-
|
||||
-if not on_rtd: # only import and set the theme if we're building docs locally
|
||||
- try:
|
||||
- import sphinx_rtd_theme
|
||||
+import sphinx_rtd_theme
|
||||
|
||||
- html_theme = "sphinx_rtd_theme"
|
||||
- html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), "."]
|
||||
- except:
|
||||
- html_theme = "default"
|
||||
- html_theme_path = ["."]
|
||||
-else:
|
||||
- html_theme_path = ["."]
|
||||
+html_theme = "sphinx_rtd_theme"
|
||||
+html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), "."]
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
45
patches/14MAY2023_pylintrc_jquery.patch
Normal file
|
|
@@ -0,0 +1,45 @@
|
|||
From 6467782ca1523e6d77cb6b857d16d6d6df1feeb7 Mon Sep 17 00:00:00 2001
|
||||
From: Tekktrik <tekktrik@gmail.com>
|
||||
Date: Sun, 14 May 2023 13:00:32 -0400
|
||||
Subject: [PATCH] Update .pylintrc, fix jQuery for docs
|
||||
|
||||
---
|
||||
.pylintrc | 2 +-
|
||||
docs/conf.py | 1 +
|
||||
docs/requirements.txt | 1 +
|
||||
3 files changed, 3 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/.pylintrc b/.pylintrc
|
||||
index 40208c3..f945e92 100644
|
||||
--- a/.pylintrc
|
||||
+++ b/.pylintrc
|
||||
@@ -396,4 +396,4 @@ min-public-methods=1
|
||||
|
||||
# Exceptions that will emit a warning when being caught. Defaults to
|
||||
# "Exception"
|
||||
-overgeneral-exceptions=Exception
|
||||
+overgeneral-exceptions=builtins.Exception
|
||||
diff --git a/docs/conf.py b/docs/conf.py
|
||||
index 7c368fb..f24dd46 100644
|
||||
--- a/docs/conf.py
|
||||
+++ b/docs/conf.py
|
||||
@@ -17,6 +17,7 @@ sys.path.insert(0, os.path.abspath(".."))
|
||||
# ones.
|
||||
extensions = [
|
||||
"sphinx.ext.autodoc",
|
||||
+ "sphinxcontrib.jquery",
|
||||
"sphinx.ext.intersphinx",
|
||||
"sphinx.ext.napoleon",
|
||||
"sphinx.ext.todo",
|
||||
diff --git a/docs/requirements.txt b/docs/requirements.txt
|
||||
index 88e6733..797aa04 100644
|
||||
--- a/docs/requirements.txt
|
||||
+++ b/docs/requirements.txt
|
||||
@@ -3,3 +3,4 @@
|
||||
# SPDX-License-Identifier: Unlicense
|
||||
|
||||
sphinx>=4.0.0
|
||||
+sphinxcontrib-jquery
|
||||
--
|
||||
2.40.1
|
||||
|
||||
30
patches/inclusive-terminology.patch
Normal file
|
|
@@ -0,0 +1,30 @@
|
|||
From f351f391e7bb66c99b8a17b04f7f48a68f0a6a0a Mon Sep 17 00:00:00 2001
|
||||
From: Alec Delaney <tekktrik@gmail.com>
|
||||
Date: Sun, 22 May 2022 00:18:23 -0400
|
||||
Subject: [PATCH 1/2] Switch to inclusive terminology
|
||||
|
||||
---
|
||||
.pylintrc | 4 ++--
|
||||
1 file changed, 2 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/.pylintrc b/.pylintrc
|
||||
index 4e50ab9..e06d2f6 100644
|
||||
--- a/.pylintrc
|
||||
+++ b/.pylintrc
|
||||
@@ -9,11 +9,11 @@
|
||||
# run arbitrary code
|
||||
extension-pkg-whitelist=
|
||||
|
||||
-# Add files or directories to the blacklist. They should be base names, not
|
||||
+# Add files or directories to the ignore-list. They should be base names, not
|
||||
# paths.
|
||||
ignore=CVS
|
||||
|
||||
-# Add files or directories matching the regex patterns to the blacklist. The
|
||||
+# Add files or directories matching the regex patterns to the ignore-list. The
|
||||
# regex matches against base names, not paths.
|
||||
ignore-patterns=
|
||||
|
||||
--
|
||||
2.35.1
|
||||
|
||||
25
patches/lines-similarity.patch
Normal file
|
|
@@ -0,0 +1,25 @@
|
|||
From 1e29fcc30ba460dd12e19ec1cb9512af56f7e01d Mon Sep 17 00:00:00 2001
|
||||
From: Alec Delaney <tekktrik@gmail.com>
|
||||
Date: Sun, 22 May 2022 00:18:55 -0400
|
||||
Subject: [PATCH 2/2] Increase min lines similarity
|
||||
|
||||
---
|
||||
.pylintrc | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/.pylintrc b/.pylintrc
|
||||
index e06d2f6..fe0cbee 100644
|
||||
--- a/.pylintrc
|
||||
+++ b/.pylintrc
|
||||
@@ -252,7 +252,7 @@ ignore-docstrings=yes
|
||||
ignore-imports=yes
|
||||
|
||||
# Minimum lines number of a similarity.
|
||||
-min-similarity-lines=4
|
||||
+min-similarity-lines=12
|
||||
|
||||
|
||||
[BASIC]
|
||||
--
|
||||
2.35.1
|
||||
|
||||
|
|
@@ -1,3 +1,7 @@
+# SPDX-FileCopyrightText: 2021 Michael Schroeder
+#
+# SPDX-License-Identifier: MIT
+
[pytest]
addopts = -v --tb=short --show-capture=no
testpaths = tests/unit/ tests/integration/
|
||||
|
|
|
|||
|
|
@@ -1,9 +1,19 @@
-black==21.6b0
-packaging==20.3
-pylint
+# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
+#
+# SPDX-License-Identifier: MIT
+
+black==22.3.0
+packaging==23.2
+pylint==2.11.1
+pytest
-pyyaml==5.4.1
-redis==2.10.6
-requests==2.25.1
+pyyaml==6.0.1
+redis==4.5.4
+requests==2.31.0
sh==1.12.14
requests-cache==0.5.2
parse==1.19.0
+GitPython==3.1.37
+PyGithub==2.1.1
+typing-extensions~=4.0
+google-auth~=2.13
+google-cloud-bigquery~=3.3
|
||||
|
|
|
|||
|
|
@@ -1,3 +1,7 @@
+# SPDX-FileCopyrightText: 2017 Scott Shawcroft for Adafruit Industries
+#
+# SPDX-License-Identifier: MIT
+
# Go here to generate a github access token:
# https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/
# KEEP THIS TOKEN SECRET AND SAFE! Anyone with access to the token has FULL CONTROL of your GitHub account!
|
||||
|
|
|
|||
15
tests/conftest.py
Normal file
|
|
@@ -0,0 +1,15 @@
|
|||
# SPDX-FileCopyrightText: 2022 Alec Delaney, for Adafruit Industries
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Configuration file for pytest (along with `pytest.ini`)"""
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
"""Add options to the `pytest` command"""
|
||||
parser.addoption(
|
||||
"--use-tokens",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Test commands that use environment tokens",
|
||||
)
|
||||
|
|
@@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2021 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2021 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Integration tests for 'adabot/arduino_libraries.py'"""
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2021 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2021 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Integration tests for 'adabot/circuitpython_libraries.py'"""
|
||||
|
||||
|
|
@@ -28,6 +10,9 @@ from adabot.lib import common_funcs
|
|||
from adabot import github_requests
|
||||
from adabot import circuitpython_libraries
|
||||
|
||||
from adabot.lib import circuitpython_library_validators
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def mock_list_repos(*args, **kwargs):
|
||||
"""Function to monkeypatch `common_funcs.list_repos()` for a shorter set of repos."""
|
||||
|
|
@@ -36,23 +21,51 @@
|
|||
]
|
||||
|
||||
|
||||
def test_circuitpython_libraries(monkeypatch):
|
||||
def test_circuitpython_libraries(monkeypatch, pytestconfig):
|
||||
"""Test main function of 'circuitpyton_libraries.py', without writing an output file."""
|
||||
|
||||
monkeypatch.setattr(common_funcs, "list_repos", mock_list_repos)
|
||||
|
||||
circuitpython_libraries.main(validator="all")
|
||||
# Delete specific tests that require repository secrets
|
||||
# They can't be tested without those secrets, so let's remove them and test the others
|
||||
if not pytestconfig.getoption("--use-tokens"):
|
||||
vals = [
|
||||
validator[0]
|
||||
for validator in circuitpython_libraries.default_validators
|
||||
if validator[0]
|
||||
not in circuitpython_library_validators.LibraryValidator.get_token_methods()
|
||||
]
|
||||
vals_str = ",".join(vals)
|
||||
else:
|
||||
vals_str = "all"
|
||||
|
||||
circuitpython_libraries.main(validator=vals_str)
|
||||
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
def test_circuitpython_libraries_output_file(monkeypatch, tmp_path, capsys):
|
||||
def test_circuitpython_libraries_output_file(
|
||||
monkeypatch, pytestconfig, tmp_path, capsys
|
||||
):
|
||||
"""Test main funciton of 'circuitpython_libraries.py', with writing an output file."""
|
||||
|
||||
monkeypatch.setattr(common_funcs, "list_repos", mock_list_repos)
|
||||
|
||||
# Delete specific tests that require repository secrets
|
||||
# They can't be tested without those secrets, so let's remove them and test the others
|
||||
if not pytestconfig.getoption("--use-tokens"):
|
||||
vals = [
|
||||
validator[0]
|
||||
for validator in circuitpython_libraries.default_validators
|
||||
if validator[0]
|
||||
not in circuitpython_library_validators.LibraryValidator.get_token_methods()
|
||||
]
|
||||
vals_str = ",".join(vals)
|
||||
else:
|
||||
vals_str = "all"
|
||||
|
||||
tmp_output_file = tmp_path / "output_test.txt"
|
||||
|
||||
circuitpython_libraries.main(validator="all", output_file=tmp_output_file)
|
||||
circuitpython_libraries.main(validator=vals_str, output_file=tmp_output_file)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,39 +1,60 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2021 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2021 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Integration tests for 'adabot/update_cp_org_libraries.py'"""
|
||||
|
||||
import json
|
||||
|
||||
import pytest # pylint: disable=unused-import
|
||||
|
||||
from adabot.lib import common_funcs
|
||||
from adabot import github_requests
|
||||
from adabot import update_cp_org_libraries
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def mock_list_repos(*args, **kwargs):
|
||||
"""Function to monkeypatch `common_funcs.list_repos()` for a shorter set of repos."""
|
||||
return [
|
||||
github_requests.get("/repos/adafruit/Adafruit_CircuitPython_TestRepo").json()
|
||||
]
|
||||
repos = []
|
||||
result = github_requests.get(
|
||||
"/search/repositories",
|
||||
params={
|
||||
"q": "Adafruit_CircuitPython user:adafruit archived:false fork:true",
|
||||
"per_page": 100,
|
||||
"sort": "updated",
|
||||
"order": "asc",
|
||||
},
|
||||
)
|
||||
|
||||
if result.ok:
|
||||
repos.extend(
|
||||
repo
|
||||
for repo in result.json()["items"]
|
||||
if (
|
||||
repo["owner"]["login"] == "adafruit"
|
||||
and (
|
||||
repo["name"].startswith("Adafruit_CircuitPython")
|
||||
or repo["name"] == "circuitpython"
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
repo_names = [repo["name"] for repo in repos]
|
||||
|
||||
if kwargs.get("include_repos", False):
|
||||
for repo in kwargs["include_repos"]:
|
||||
if repo not in repo_names:
|
||||
add_repo = github_requests.get("/repos/adafruit/" + repo)
|
||||
if add_repo.ok:
|
||||
repos.append(add_repo.json())
|
||||
else:
|
||||
print("list_repos(): Failed to retrieve '{}'".format(repo))
|
||||
|
||||
if len(repos) > 5:
|
||||
repos = repos[:5]
|
||||
|
||||
return repos
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
|
@@ -54,7 +75,7 @@ def test_update_cp_org_libraries(monkeypatch):
|
|||
monkeypatch.setattr(common_funcs, "list_repos", mock_list_repos)
|
||||
monkeypatch.setattr(update_cp_org_libraries, "get_contributors", mock_get_contribs)
|
||||
|
||||
update_cp_org_libraries.main()
|
||||
update_cp_org_libraries.main(loglevel="INFO")
|
||||
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
|
|
@@ -66,8 +87,8 @@ def test_update_cp_org_libraries_output_file(monkeypatch, tmp_path, capsys):
|
|||
|
||||
tmp_output_file = tmp_path / "output_test.txt"
|
||||
|
||||
update_cp_org_libraries.main(output_file=tmp_output_file)
|
||||
update_cp_org_libraries.main(loglevel="INFO", output_file=tmp_output_file)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
output = tmp_output_file.read_text()
|
||||
|
||||
assert tmp_output_file.read_text() == captured.out
|
||||
assert json.loads(output)
|
||||
|
|
|
|||
|
|
@@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2021 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2021 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Unit tests for 'adabot/lib/blinka_funcs.py'"""
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2021 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2021 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Unit tests for 'adabot/lib/common_funcs.py'"""
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2021 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2021 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Unit tests for 'adabot/github_requests.py'"""
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,24 +1,6 @@
|
|||
# The MIT License (MIT)
|
||||
# SPDX-FileCopyrightText: 2021 Michael Schroeder
|
||||
#
|
||||
# Copyright (c) 2021 Michael Schroeder
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Unit tests for 'adabot/pypi_requests.py'"""
|
||||
|
||||
|
|
|
|||
67
tools/README.md
Normal file
|
|
@@ -0,0 +1,67 @@
|
|||
# Adabot Tools and Scripts
|
||||
|
||||
|
||||
#### library_functions.py
|
||||
|
||||
Functions and typing protocols used for working with bundle libraries.
|
||||
Pairs with functionality in `iterate_libraries.py` to create functions
|
||||
for working with the Adafruit CircuitPython Bundle.
|
||||
|
||||
|
||||
#### iterate_libraries.py
|
||||
|
||||
Function for looping through the libraries in the bundle. There is a
|
||||
function for iterating through a cloned bundle ("local") and another
|
||||
for iterating through the bundle libraries on GitHub ("remote"). These
|
||||
functions allow for a single function (see `library_functions.py`) to
|
||||
act upon all libraries/repositories.
|
||||
|
||||
|
||||
#### git_functionality.py
|
||||
|
||||
Provides basic git functionality such as syncing libraries and pushing
|
||||
changes to the remote when working with a cloned library. In particular,
|
||||
it defines decorators that can be used with functions described by
|
||||
`library_functions.LocalLibFunc`.
|
||||
|
||||
|
||||
#### ci_status.py
|
||||
|
||||
Provides functionality for checking the GitHub Actions status of bundle
|
||||
libraries. Note that a GitHub token with the proper scope must be given
|
||||
(the one needed for working with adabot should be enough).
|
||||
|
||||
|
||||
#### docs_status.py
|
||||
|
||||
Provides functionality for checking the ReadTheDocs build status of
|
||||
bundle libraries. Note that both a Github and ReadTheDocs token must be
|
||||
given (the GitHub token used by adabot should be enough, but a token
|
||||
from adabot's account on ReadTheDocs will be needed).
|
||||
|
||||
|
||||
#### find_text.py
|
||||
|
||||
Script to check for text across CircuitPython repos.
|
||||
|
||||
Type:
|
||||
`python3 find_text.py -h`
|
||||
to figure out how to use it.
|
||||
|
||||
|
||||
#### runner.py
|
||||
|
||||
Script to run specific CircuitPython library validators one at a time.
|
||||
|
||||
Must be run from top-level directory (i.e. one up from this one).
|
||||
|
||||
Run with:
|
||||
`python3 runner.py`
|
||||
and then type in the number you want to run.
|
||||
|
||||
|
||||
#### file_compare.py
|
||||
|
||||
Functionality to compare a file across all Adafruit CircuitPython repos
|
||||
and output the text of the files along with which and how many repos use that
|
||||
exact file text.
|
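A rough sketch of how these pieces fit together (hypothetical values; the pattern follows the `iter_remote_bundle_with_func` call made in `ci_status.py` below):

# Hypothetical illustration, not part of the repository.
from iterate_libraries import iter_remote_bundle_with_func
from ci_status import check_build_status

# Apply check_build_status(user, branch, workflow_filename, debug=...) to every
# bundle repository; each entry pairs a Repository with that function's result.
results = iter_remote_bundle_with_func(
    "ghp_example_token",  # placeholder GitHub token
    [(check_build_status, (None, "main", "build.yml"), {"debug": True})],
)
failing = [repo.name for repo, outcomes in results if not outcomes[0]]
print(failing)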
||||
3
tools/README.md.license
Normal file
|
|
@@ -0,0 +1,3 @@
|
|||
SPDX-FileCopyrightText: 2022 Kattni Rembor
|
||||
|
||||
SPDX-License-Identifier: MIT
|
||||
390
tools/ci_status.py
Normal file
|
|
@@ -0,0 +1,390 @@
|
|||
# SPDX-FileCopyrightText: 2022 Alec Delaney
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
|
||||
ci_status.py
|
||||
============
|
||||
|
||||
Functionality using ``PyGithub`` to check the CI status of repos
|
||||
contained within the Adafruit CircuitPython Bundle
|
||||
|
||||
* Author(s): Alec Delaney
|
||||
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
import argparse
|
||||
import time
|
||||
from github.Repository import Repository
|
||||
from github.Workflow import Workflow
|
||||
from github.WorkflowRun import WorkflowRun
|
||||
from github.GithubException import GithubException
|
||||
from library_functions import StrPath
|
||||
from iterate_libraries import (
|
||||
iter_remote_bundle_with_func,
|
||||
RemoteLibFunc_IterResult,
|
||||
)
|
||||
|
||||
|
||||
def run_gh_rest_check(
|
||||
lib_repo: Repository,
|
||||
user: Optional[str] = None,
|
||||
branch: Optional[str] = None,
|
||||
workflow_filename: Optional[str] = "build.yml",
|
||||
) -> str:
|
||||
"""Uses ``PyGithub`` to check the CI status of a repository
|
||||
|
||||
:param Repository lib_repo: The repo as a github.Repository.Repository object
|
||||
:param str|None user: The user that triggered the run; if `None` is
|
||||
provided, any user is acceptable
|
||||
:param str|None branch: The branch name to specifically check; if `None` is
|
||||
provided, all branches are allowed; this is the default
|
||||
:param str|None workflow_filename: The filename of the workflow; if `None` is
|
||||
provided, any workflow name is acceptable; the default is ``"build.yml"``
|
||||
:return: The requested run's conclusion
|
||||
:rtype: str
|
||||
"""
|
||||
|
||||
arg_dict = {}
|
||||
if user is not None:
|
||||
arg_dict["actor"] = user
|
||||
if branch is not None:
|
||||
arg_dict["branch"] = branch
|
||||
|
||||
workflow: Workflow = lib_repo.get_workflow(workflow_filename)
|
||||
workflow_runs = workflow.get_runs(**arg_dict)
|
||||
return workflow_runs[0].conclusion
|
||||
|
||||
|
||||
def run_gh_rest_rerun(
|
||||
lib_repo: Repository,
|
||||
user: Optional[str] = None,
|
||||
branch: Optional[str] = None,
|
||||
workflow_filename: Optional[str] = "build.yml",
|
||||
rerun_level: int = 0,
|
||||
) -> bool:
|
||||
"""Uses ``PyGithub`` to rerun the CI status of a repository
|
||||
|
||||
:param Repository lib_repo: The repo as a github.Repository.Repository object
|
||||
:param str|None user: The user that triggered the run; if `None` is
|
||||
provided, any user is acceptable
|
||||
:param str|None branch: The branch name to specifically check; if `None` is
|
||||
provided, all branches are allowed; this is the default
|
||||
:param str|None workflow_filename: The filename of the workflow; if `None` is
|
||||
provided, any workflow name is acceptable; the default is ``"build.yml"``
|
||||
:param int rerun_level: The level at which rerun should occur (0 = none,
|
||||
1 = failed, 2 = all)
|
||||
:return: Whether a workflow rerun was triggered
|
||||
:rtype: bool
|
||||
"""
|
||||
if not rerun_level:
|
||||
return False
|
||||
if rerun_level == 1:
|
||||
result = (
|
||||
run_gh_rest_check(lib_repo, user, branch, workflow_filename) == "success"
|
||||
)
|
||||
if rerun_level == 2 or not result:
|
||||
arg_dict = {}
|
||||
if user is not None:
|
||||
arg_dict["actor"] = user
|
||||
if branch is not None:
|
||||
arg_dict["branch"] = branch
|
||||
workflow: Workflow = lib_repo.get_workflow(workflow_filename)
|
||||
latest_run: WorkflowRun = workflow.get_runs(**arg_dict)[0]
|
||||
latest_run.rerun()
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def check_build_status(
|
||||
lib_repo: Repository,
|
||||
user: Optional[str] = None,
|
||||
branch: Optional[str] = None,
|
||||
workflow_filename: Optional[str] = "build.yml",
|
||||
debug: bool = False,
|
||||
) -> Optional[bool]:
|
||||
"""Uses ``PyGithub`` to check the build statuses of the Adafruit
|
||||
CircuitPython Bundle
|
||||
|
||||
:param Repository lib_repo: The repo as a github.Repository.Repository object
|
||||
:param str|None user: The user that triggered the run; if `None` is
|
||||
provided, any user is acceptable
|
||||
:param str|None branch: The branch name to specifically check; if `None` is
|
||||
provided, all branches are allowed; this is the default
|
||||
:param str|None workflow_filename: The filename of the workflow; if `None`
|
||||
is provided, any workflow name is acceptable; the default is `"build.yml"`
|
||||
:param bool debug: Whether debug statements should be printed to the standard
|
||||
output
|
||||
:return: The result of the workflow run, or ``None`` if it could not be
|
||||
determined
|
||||
:rtype: bool|None
|
||||
"""
|
||||
|
||||
if debug:
|
||||
print("Checking", lib_repo.name)
|
||||
|
||||
if lib_repo.archived:
|
||||
return True
|
||||
|
||||
try:
|
||||
result = (
|
||||
run_gh_rest_check(lib_repo, user, branch, workflow_filename) == "success"
|
||||
)
|
||||
if debug and not result:
|
||||
print("***", "Library", lib_repo.name, "failed the patch!", "***")
|
||||
return result
|
||||
except GithubException:
|
||||
if debug:
|
||||
print(
|
||||
"???",
|
||||
"Library",
|
||||
lib_repo.name,
|
||||
"workflow could not be determined",
|
||||
"???",
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
def rerun_workflow(
|
||||
lib_repo: Repository,
|
||||
user: Optional[str] = None,
|
||||
branch: Optional[str] = None,
|
||||
workflow_filename: Optional[str] = "build.yml",
|
||||
rerun_level: int = 0,
|
||||
debug: bool = False,
|
||||
):
|
||||
"""Uses ``PyGithub`` to rerun the CI of the Adafruit
|
||||
CircuitPython Bundle repositories
|
||||
|
||||
:param Repository lib_repo: The repo as a github.Repository.Repository object
|
||||
:param str|None user: The user that triggered the run; if `None` is
|
||||
provided, any user is acceptable
|
||||
:param str|None branch: The branch name to specifically check; if `None` is
|
||||
provided, all branches are allowed; this is the default
|
||||
:param str|None workflow_filename: The filename of the workflow; if `None`
|
||||
is provided, any workflow name is acceptable; the default is `"build.yml"`
|
||||
:param int rerun_level: The level at which rerun should occur (0 = none,
|
||||
1 = failed, 2 = all)
|
||||
:param bool debug: Whether debug statements should be printed to the standard
|
||||
output
|
||||
:return: The result of the workflow run, or ``None`` if it could not be
|
||||
determined
|
||||
:rtype: bool|None
|
||||
"""
|
||||
if lib_repo.archived:
|
||||
return False
|
||||
|
||||
try:
|
||||
result = run_gh_rest_rerun(
|
||||
lib_repo, user, branch, workflow_filename, rerun_level
|
||||
)
|
||||
if debug and result:
|
||||
print("***", "Library", lib_repo.name, "workflow was rerun!", "***")
|
||||
return result
|
||||
except GithubException:
|
||||
if debug:
|
||||
print(
|
||||
"???",
|
||||
"Library",
|
||||
lib_repo.name,
|
||||
"had an issue occur",
|
||||
"???",
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
def check_build_statuses(
|
||||
gh_token: str,
|
||||
user: Optional[str] = None,
|
||||
branch: Optional[str] = "main",
|
||||
workflow_filename: Optional[str] = "build.yml",
|
||||
*,
|
||||
debug: bool = False,
|
||||
local_folder: str = "",
|
||||
) -> list[RemoteLibFunc_IterResult[bool]]:
|
||||
"""Checks all the libraries in the Adafruit CircuitPython Bundle to get the
|
||||
latest build status with the requested information
|
||||
|
||||
:param str gh_token: The Github token to be used with the Github API
|
||||
:param str|None user: The user that triggered the run; if `None` is
|
||||
provided, any user is acceptable
|
||||
:param str|None branch: The branch name to specifically check; if `None` is
|
||||
provided, all branches are allowed; this is the default
|
||||
:param str|None workflow_filename: The filename of the workflow; if `None` is
|
||||
provided, any workflow name is acceptable; the default is `"build.yml"`
|
||||
:param bool debug: Whether debug statements should be printed to
|
||||
the standard output
|
||||
:param str local_folder: A path to a local folder containing extra repositories
|
||||
:return: A list of tuples containing paired Repository objects and build
|
||||
statuses
|
||||
:rtype: list
|
||||
"""
|
||||
|
||||
return iter_remote_bundle_with_func(
|
||||
gh_token,
|
||||
[(check_build_status, (user, branch, workflow_filename), {"debug": debug})],
|
||||
local_folder=local_folder,
|
||||
)
|
||||
|
||||
|
||||
def rerun_workflows(
|
||||
gh_token: str,
|
||||
user: Optional[str] = None,
|
||||
branch: Optional[str] = "main",
|
||||
workflow_filename: Optional[str] = "build.yml",
|
||||
rerun_level: int = 0,
|
||||
*,
|
||||
debug: bool = False,
|
||||
local_folder: str = "",
|
||||
) -> list[RemoteLibFunc_IterResult[bool]]:
|
||||
"""Reruns the CI of all the libraries in the Adafruit CircuitPython Bundle.
|
||||
|
||||
:param str gh_token: The Github token to be used with the Github API
|
||||
:param str|None user: The user that triggered the run; if `None` is
|
||||
provided, any user is acceptable
|
||||
:param str|None branch: The branch name to specifically check; if `None` is
|
||||
provided, all branches are allowed; this is the default
|
||||
:param str|None workflow_filename: The filename of the workflow; if `None` is
|
||||
provided, any workflow name is acceptable; the default is `"build.yml"`
|
||||
:param int rerun_level: The level at which reruns should occur (0 = none,
|
||||
1 = failed, 2 = all)
|
||||
:param bool debug: Whether debug statements should be printed to
|
||||
the standard output
|
||||
:param str local_folder: A path to a local folder containing extra repositories
|
||||
:return: A list of tuples containing paired Repository objects and build
|
||||
statuses
|
||||
:rtype: list
|
||||
"""
|
||||
|
||||
return iter_remote_bundle_with_func(
|
||||
gh_token,
|
||||
[
|
||||
(
|
||||
rerun_workflow,
|
||||
(user, branch, workflow_filename, rerun_level),
|
||||
{"debug": debug},
|
||||
)
|
||||
],
|
||||
local_folder=local_folder,
|
||||
)
|
||||
|
||||
|
||||
def save_build_statuses(
|
||||
build_results: list[RemoteLibFunc_IterResult[bool]],
|
||||
failures_filepath: StrPath = "failures.txt",
|
||||
) -> None:
|
||||
"""Save the list of failed and/or errored libraries to files
|
||||
|
||||
:param list failed_builds: The list of workflow run results after
|
||||
iterating through the libraries
|
||||
:param StrPath failures_filepath: The filename/filepath to write the list
|
||||
of failed libraries to; the default is "failures.txt"
|
||||
"""
|
||||
|
||||
# Get failed builds
|
||||
bad_builds = [result[0].name for result in build_results if result[1][0]]
|
||||
|
||||
# Save the list of bad builds, if provided
|
||||
if bad_builds:
|
||||
with open(failures_filepath, mode="w", encoding="utf-8") as outputfile:
|
||||
for build in bad_builds:
|
||||
outputfile.write(build + "\n")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Check the CI status of the Bundle libraries"
|
||||
)
|
||||
parser.add_argument(
|
||||
"gh_token", metavar="GH_TOKEN", type=str, help="GitHub token with proper scopes"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--user",
|
||||
metavar="U",
|
||||
type=str,
|
||||
dest="user",
|
||||
default=None,
|
||||
help="Select a specific user that triggered the workflow",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--branch",
|
||||
metavar="B",
|
||||
type=str,
|
||||
dest="branch",
|
||||
default=None,
|
||||
help='Branch name; default is "main"',
|
||||
)
|
||||
parser.add_argument(
|
||||
"--workflow",
|
||||
metavar="W",
|
||||
type=str,
|
||||
dest="workflow",
|
||||
default="build.yml",
|
||||
help='Workflow name; default is "build.yml"',
|
||||
)
|
||||
parser.add_argument(
|
||||
"--debug", action="store_true", help="Print debug text during execution"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--rerun-level",
|
||||
metavar="R",
|
||||
type=int,
|
||||
dest="rerun_level",
|
||||
default=0,
|
||||
help="Level to rerun CI workflows (0 = none, 1 = failed, 2 = all)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--local-folder",
|
||||
metavar="L",
|
||||
type=str,
|
||||
dest="local_folder",
|
||||
default="",
|
||||
help="An additional folder to check and run",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.rerun_level:
|
||||
if args.debug:
|
||||
print("Rerunning workflows...")
|
||||
rerun_workflows(
|
||||
args.gh_token,
|
||||
args.user,
|
||||
args.branch,
|
||||
args.workflow,
|
||||
args.rerun_level,
|
||||
debug=args.debug,
|
||||
local_folder=args.local_folder,
|
||||
)
|
||||
if args.debug:
|
||||
print("Waiting 10 minutes to allow workflows to finish running...")
|
||||
time.sleep(600)
|
||||
|
||||
if args.debug:
|
||||
print("Checking workflows statuses...")
|
||||
results = check_build_statuses(
|
||||
args.gh_token,
|
||||
args.user,
|
||||
args.branch,
|
||||
args.workflow,
|
||||
debug=args.debug,
|
||||
local_folder=args.local_folder,
|
||||
)
|
||||
|
||||
fail_list = [
|
||||
repo_name.name for repo_name, repo_results in results if not repo_results[0]
|
||||
]
|
||||
|
||||
if fail_list:
|
||||
print(f'Failures for CI workflow "{args.workflow}":')
|
||||
for failure in fail_list:
|
||||
print(failure)
|
||||
RETURN_CODE = 1
|
||||
else:
|
||||
print(f"No failures for CI workflow: {args.workflow}!")
|
||||
RETURN_CODE = 0
|
||||
|
||||
raise SystemExit(RETURN_CODE)
|
||||
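For reference, a minimal way to drive the helpers above from another script (the token value is a placeholder; the command-line path is handled by the __main__ block):

# Hypothetical illustration; the CLI equivalent is roughly:
#   python3 tools/ci_status.py GH_TOKEN --branch main --rerun-level 1 --debug
from ci_status import check_build_statuses, save_build_statuses

results = check_build_statuses(
    "ghp_example_token",  # placeholder GitHub token
    branch="main",
    workflow_filename="build.yml",
    debug=True,
)
# Write the flagged repository names to failures.txt.
save_build_statuses(results, failures_filepath="failures.txt")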
137
tools/docs_status.py
Normal file
|
|
@@ -0,0 +1,137 @@
|
|||
# SPDX-FileCopyrightText: 2022 Alec Delaney
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
|
||||
docs_status.py
|
||||
==============
|
||||
|
||||
Functionality for checking the ReadTheDocs build status for libraries
|
||||
in the Adafruit CircuitPython Bundle
|
||||
|
||||
* Author(s): Alec Delaney
|
||||
|
||||
"""
|
||||
|
||||
from typing import Any, Optional
|
||||
import argparse
|
||||
import time
|
||||
import parse
|
||||
import requests
|
||||
from github.Repository import Repository
|
||||
from github.ContentFile import ContentFile
|
||||
from iterate_libraries import (
|
||||
iter_remote_bundle_with_func,
|
||||
RemoteLibFunc_IterResult,
|
||||
)
|
||||
|
||||
|
||||
def check_docs_status(
|
||||
lib_repo: Repository, rtd_token: str, *, debug: bool = True
|
||||
) -> Optional[bool]:
|
||||
"""Checks a library for the latest documentation build status with the
|
||||
requested information
|
||||
|
||||
.. note::
|
||||
|
||||
The ReadTheDocs token must have sufficient privileges for accessing
|
||||
the API; therefore, only a maintainer can use this functionality.
|
||||
|
||||
:param Repository lib_repo: The repo as a github.Repository.Repository object
|
||||
:param str rtd_token: A ReadTheDocs API token with sufficient privileges
|
||||
:param bool debug: Whether to use debug print statements
|
||||
:return: Whether the documentation built successfully; returns None if it
|
||||
could not be determined
|
||||
:rtype: bool|None
|
||||
"""
|
||||
|
||||
if debug:
|
||||
print("Checking", lib_repo.name)
|
||||
|
||||
# Get the README file contents
|
||||
content_file: ContentFile = lib_repo.get_contents("README.rst")
|
||||
readme_text = content_file.decoded_content.decode("utf-8")
|
||||
|
||||
# Parse for the ReadTheDocs slug
|
||||
search_results: parse.Result = parse.search(
|
||||
"https://readthedocs.org/projects/{slug:S}/badge", readme_text
|
||||
)
|
||||
rtd_slug: str = search_results.named["slug"]
|
||||
rtd_slug = rtd_slug.replace("_", "-", -1)
|
||||
|
||||
# GET the latest documentation build runs
|
||||
url = f"https://readthedocs.org/api/v3/projects/{rtd_slug}/builds/"
|
||||
headers = {"Authorization": f"token {rtd_token}"}
|
||||
response = requests.get(url, headers=headers)
|
||||
json_response: dict[str, Any] = response.json()
|
||||
|
||||
# Return the results of the latest run
|
||||
doc_build_results: Optional[list[dict[str, Any]]] = json_response.get(
|
||||
"results", None
|
||||
)
|
||||
if doc_build_results is None:
|
||||
return None
|
||||
result = doc_build_results[0].get("success")
|
||||
if debug and not result:
|
||||
print(f"RTD build failed or unavailable for {lib_repo.name}")
|
||||
time.sleep(3)
|
||||
return result
|
||||
|
||||
|
||||
def check_docs_statuses(
|
||||
gh_token: str, rtd_token: str
|
||||
) -> list[RemoteLibFunc_IterResult[Optional[bool]]]:
|
||||
"""Checks all the libraries in a cloned Adafruit CircuitPython Bundle
|
||||
to get the latest documentation build status with the requested
|
||||
information
|
||||
|
||||
.. note::
|
||||
|
||||
The ReadTheDocs token must have sufficient privileges for accessing
|
||||
the API; therefore, only a maintainer can use this functionality.
|
||||
|
||||
:param str gh_token: The Github token to be used with the Github API
|
||||
:param str rtd_token: A ReadTheDocs API token with sufficient privileges
|
||||
:return: A list of tuples containing paired Repository objects and
|
||||
documentation build statuses
|
||||
:rtype: list
|
||||
"""
|
||||
|
||||
return iter_remote_bundle_with_func(
|
||||
gh_token, [(check_docs_status, (rtd_token,), {"debug": True})]
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Check the RTD docs build status of the Bundle libraries"
|
||||
)
|
||||
parser.add_argument(
|
||||
"gh_token", metavar="GH_TOKEN", type=str, help="GitHub token with proper scopes"
|
||||
)
|
||||
parser.add_argument(
|
||||
"rtd_token", metavar="RTD_TOKEN", type=str, help="ReadTheDocs token"
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
results = check_docs_statuses(args.gh_token, args.rtd_token)
|
||||
fail_list = [
|
||||
repo_name.name
|
||||
for repo_name, repo_results in results
|
||||
if not repo_results[0] # pylint: disable=singleton-comparison
|
||||
]
|
||||
|
||||
if fail_list:
|
||||
print("Failures for RTD builds:")
|
||||
for failure in fail_list:
|
||||
print(failure)
|
||||
RETURN_CODE = 1
|
||||
else:
|
||||
print("No failures for RTD builds!")
|
||||
RETURN_CODE = 0
|
||||
|
||||
raise SystemExit(RETURN_CODE)
|
||||
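A similarly minimal sketch for the ReadTheDocs checker above (both tokens are placeholders):

# Hypothetical illustration, mirroring the __main__ block above.
from docs_status import check_docs_statuses

results = check_docs_statuses("ghp_example_token", "rtd_example_token")
failing = [repo.name for repo, outcomes in results if not outcomes[0]]
print("RTD build failures:", failing)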
136
tools/file_compare.py
Normal file
|
|
@@ -0,0 +1,136 @@
|
|||
# SPDX-FileCopyrightText: 2022 Eva Herrada
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
|
||||
file_compare.py
|
||||
===============
|
||||
|
||||
Functionality to compare a file across all Adafruit CircuitPython repos
|
||||
and output the text of the files along with which and how many repos use that
|
||||
exact file text.
|
||||
|
||||
* Author(s): Eva Herrada
|
||||
|
||||
"""
|
||||
import argparse
|
||||
from typing import Optional
|
||||
|
||||
import requests
|
||||
from requests.structures import CaseInsensitiveDict
|
||||
|
||||
from adabot.lib.common_funcs import list_repos
|
||||
|
||||
|
||||
def compare(git_file: str, token: Optional[str] = None) -> list:
|
||||
"""Uses requests to compare files across the adafruit org
|
||||
|
||||
.. note::
|
||||
|
||||
The GitHub API token is not necessary as long as all repos
|
||||
being accessed are public. However: it does make things easier
|
||||
as you won't get rate-limited quite as often
|
||||
|
||||
:param str git_file: The file to compare
|
||||
:param str|None token: The (optional but recommended) github API token
|
||||
:return: A list containing all the unique file texts, sorted from most to
|
||||
least common along with the repos that have that exact file text.
|
||||
:rtype: list
|
||||
"""
|
||||
|
||||
files = {}
|
||||
|
||||
all_repos = list_repos()
|
||||
print("Got Repos List")
|
||||
print(f"Repos found: {len(all_repos)}")
|
||||
|
||||
for repo in all_repos:
|
||||
name = repo["name"]
|
||||
url = f"https://raw.githubusercontent.com/adafruit/{name}/main/{git_file}"
|
||||
|
||||
if token:
|
||||
# If repo is private - we need to add a token in header:
|
||||
headers = CaseInsensitiveDict()
|
||||
headers["Authorization"] = f"token {token}"
|
||||
|
||||
resp = requests.get(url, headers=headers)
|
||||
else:
|
||||
resp = requests.get(url)
|
||||
|
||||
if resp.status_code != 200:
|
||||
print(name)
|
||||
print(resp.status_code)
|
||||
if resp.text not in files:
|
||||
files[resp.text] = [1, [repo["html_url"]]]
|
||||
else:
|
||||
files[resp.text][0] = files[resp.text][0] + 1
|
||||
files[resp.text][1].append(repo["html_url"])
|
||||
|
||||
top = 0
|
||||
sort = []
|
||||
for text, repos in files.items():
|
||||
if repos[0] >= top:
|
||||
sort.insert(0, [repos[0], text, repos[1]])
|
||||
top = repos[0]
|
||||
else:
|
||||
for i, val in enumerate(sort):
|
||||
if val[0] <= repos[0]:
|
||||
sort.insert(i, [repos[0], text, repos[1]])
|
||||
break
|
||||
|
||||
return sort
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Compare files across the adafruit CircuitPython repos",
|
||||
)
|
||||
parser.add_argument(
|
||||
"gh_token",
|
||||
metavar="GH_TOKEN",
|
||||
type=str,
|
||||
help="GitHub token with proper scopes",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--file",
|
||||
metavar="<FILE>",
|
||||
type=str,
|
||||
dest="file",
|
||||
required=True,
|
||||
help="File to compare",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-o",
|
||||
metavar="<OUTFILE>",
|
||||
type=str,
|
||||
dest="outfile",
|
||||
default=None,
|
||||
help="File to send output to",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
results = compare(args.file, args.gh_token)
|
||||
|
||||
for index, value in enumerate(results):
|
||||
print(f"##### {index+1}/{len(results)} #####")
|
||||
print(value[0])
|
||||
print("START OF FILE")
|
||||
print(value[1])
|
||||
print("END OF FILE")
|
||||
print(value[2])
|
||||
print()
|
||||
if args.outfile:
|
||||
with open(args.outfile, "w") as F:
|
||||
for index, value in enumerate(results):
|
||||
F.write(f"##### {index+1}/{len(results)} #####\n")
|
||||
F.write(f"{value[0]}\n")
|
||||
F.write("START OF FILE\n")
|
||||
F.write(f"{value[1]}\n")
|
||||
F.write("END OF FILE\n")
|
||||
for r in value[2]:
|
||||
F.write(r + "\n")
|
||||
F.write("\n")
|
||||
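Each entry in the list returned by compare() above has the shape [count, file_text, repo_urls], sorted from most to least common. A minimal sketch of consuming that result programmatically, assuming the script is run from the tools directory; the file name and printout are illustrative only, and omitting the token means only public repos are fetched and rate limits apply:

    # Illustrative consumption of compare()'s return value (hypothetical example)
    from file_compare import compare

    variants = compare("CODE_OF_CONDUCT.md")
    for count, file_text, repo_urls in variants:
        print(f"{count} repos share a {len(file_text)}-character version of the file")
        print("  e.g.", repo_urls[0])
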
tools/find_text.py (new file, 182 lines)
@@ -0,0 +1,182 @@
# SPDX-FileCopyrightText: 2022 Eva Herrada
#
# SPDX-License-Identifier: MIT

"""
Tool for searching for text across all CircuitPython libraries.
Intended to be used to verify patches.

IMPORTANT: Must be run from the top-level adabot directory (one directory up
from this one)

Type `python3 find_text.py -h` to see how to use it.
"""

import datetime
import getopt
import json
import sys

import requests

from adabot.lib.common_funcs import list_repos

argumentList = sys.argv[1:]

OPTIONS = "ht:f:o:j"

long_options = ["help", "text=", "file=", "outfile=", "json"]

HELPMSG = """Usage:
python3 find_text.py [-h | -t text | -f file | -o outfile | -j]
Arguments:
-h --help - Displays this message
-t --text - (required) Text to check for, can be used multiple times
-f --file - (required) File on GitHub to check for the text in
-o --outfile - (optional) Output file, prints output to stdout if this
    is not set
-j --json - Outputs in JSON instead of plain text"""

text = []
FILE = None
OUTFILE = None
j = False

URL_TEMPLATE = "https://raw.githubusercontent.com/adafruit/{}/main/{}"
RELEASE_TEMPLATE = "https://api.github.com/repos/adafruit/{}/releases/latest"

try:
    arguments, values = getopt.getopt(argumentList, OPTIONS, long_options)

    for currentArgument, currentValue in arguments:
        if currentArgument in ("-h", "--help"):
            print(HELPMSG)
            sys.exit()

        if currentArgument in ("-t", "--text"):
            print(f"Text: {currentValue}")
            text.append(currentValue)
            print(text)

        if currentArgument in ("-f", "--file"):
            print(f"File: {currentValue}")
            FILE = currentValue

        if currentArgument in ("-o", "--outfile"):
            OUTFILE = currentValue

        if currentArgument in ("-j", "--json"):
            j = True

except getopt.error as err:
    print(str(err))


if len(text) == 0 or FILE is None:
    if len(text) == 0:
        print("Please enter text to check for")
    if FILE is None:
        print("Please enter a file to search for the text in")
    print(HELPMSG)
    sys.exit()

RESULTS = {
    "file_not_found": [],
    "file_has_none": [],
    "file_has_all": [],
}
for i in range(len(text)):
    RESULTS[f"file_has_text_{i}"] = []


def delete_multiple_lines(n=1):
    """Delete the last n lines written to STDOUT."""
    for _ in range(n):
        sys.stdout.write("\x1b[1A")  # cursor up one line
        sys.stdout.write("\x1b[2K")  # delete the last line


def prettyprint(info, results):
    """Prints info about the current repo and the result of the search"""
    print("┌" + "─" * (len(info) + 4) + "┐")
    print("│ ", info, " │")
    for res in results:
        print("│ ", res, " " * (len(info) - (len(res) - 9)), "│")
    print("└" + "─" * (len(info) + 4) + "┘")
    delete_multiple_lines(3 + len(results))


try:
    with open("repos.json", "r") as f:
        LAST_RUN = f.readline().rstrip()
except FileNotFoundError:
    LAST_RUN = ""

print(f"Last run: {LAST_RUN}")
if LAST_RUN != str(datetime.date.today()):
    with open("repos.json", "w") as f:
        print("Fetching Repos List")
        all_repos = list_repos()
        print("Got Repos List")
        f.write(str(datetime.date.today()) + "\n")
        f.write(json.dumps(all_repos))

with open("repos.json", "r") as f:
    all_repos = json.loads(f.read().split("\n")[1])

print(f"Repos found: {len(all_repos)}")


for repo in all_repos:
    INFO = "getting {} for: {}".format(FILE, repo["name"])
    response = requests.get(URL_TEMPLATE.format(repo["name"], FILE))
    result = []
    if response.status_code == 404:
        RESULTS["file_not_found"].append(repo["html_url"])
        result.append("\033[91mFile not found\033[0m")
    else:
        tracker = [False for i in range(len(text))]
        for index, item in enumerate(text):
            if item in response.text:
                tracker[index] = True

        if all(tracker):
            result = ["\033[92mFound all text\033[0m"]
            RESULTS["file_has_all"].append(repo["html_url"])
        elif not any(tracker):
            result = ["\033[91mDid not find any text\033[0m"]
            RESULTS["file_has_none"].append(repo["html_url"])
        for index, item in enumerate(tracker):
            if item:
                result.append(f"\033[93mFound text {index}\033[0m")
                RESULTS[f"file_has_text_{index}"].append(repo["html_url"])
            else:
                result.append(f"\033[93mDid not find text {index}\033[0m")

    prettyprint(INFO, result)

if j:
    if OUTFILE is not None:
        with open(OUTFILE, "w") as F:
            F.write(json.dumps(RESULTS))
    else:
        print(json.dumps(RESULTS))
else:
    if OUTFILE is not None:
        with open(OUTFILE, "w") as F:
            for k, v in RESULTS.items():
                F.write(k + "\n")
                for i in v:
                    F.write(i + "\n")
                F.write("\n")
    else:
        for k, v in RESULTS.items():
            print(k)
            for i in v:
                print(i)


print("┌" + "─" * 30 + "┐")
for k, v in RESULTS.items():
    print("│ ", k, len(v), " " * (24 - (len(k) + len(str(len(v))))), " │")
print("└" + "─" * 30 + "┘")

tools/git_functionality.py (new file, 153 lines)
@@ -0,0 +1,153 @@
# SPDX-FileCopyrightText: 2022 Alec Delaney
#
# SPDX-License-Identifier: MIT

"""

git_functionality.py
====================

Functionality for using GitPython to fetch, pull, commit, push, etc. to
cloned repos and their remotes

* Author(s): Alec Delaney

"""

import functools
from typing import Any
import git
import git.repo
import git.index.base
from library_functions import StrPath


def _get_repo_and_remote(
    lib_path: StrPath, remote_name: str = "origin"
) -> tuple[git.repo.Repo, git.Remote]:
    """
    Get the repo and remote objects

    :param StrPath lib_path: The path to the repo
    :param str remote_name: (optional) The name of the remote,
        default is ``origin``
    :return: The repo and remote objects
    :rtype: tuple
    """

    # Create the repo and remote objects
    repo = git.repo.Repo(lib_path)
    remote = repo.remote(remote_name)

    return repo, remote


def sync_and_checkout(
    lib_path: StrPath, remote_name: str = "origin", branch_name: str = "main"
) -> None:
    """
    Update the repo, and ensure it is on the given branch using a
    forced checkout

    :param StrPath lib_path: The path to the repo
    :param str remote_name: The remote name to fetch and pull,
        default is ``origin``
    :param str branch_name: The branch name to checkout, default
        is ``main``
    """

    # Create the repo and remote objects
    repo, remote = _get_repo_and_remote(lib_path, remote_name)

    # Fetch from the remote
    remote.fetch()

    # Checkout and pull to the given branch
    branch: git.Head = getattr(repo.heads, branch_name)
    branch.checkout(force=True)
    remote.pull()


def push_changes(lib_path: StrPath, remote_name: str = "origin") -> None:
    """
    Pushes any changes made to the repo to the given remote

    :param StrPath lib_path: The path to the repo
    :param str remote_name: (optional) The name of the remote, default
        is ``origin``
    """

    # Create the repo and remote objects
    _, remote = _get_repo_and_remote(lib_path, remote_name)

    # Push changes
    remote.push()


def commit_changes(
    lib_path: StrPath,
    message: str,
    remote_name: str = "origin",
    skip_hooks: bool = True,
) -> None:
    """
    Stage all files and commit them

    :param StrPath lib_path: The path to the repo
    :param str message: The commit message
    :param str remote_name: (optional) The name of the remote,
        default is ``origin``
    :param bool skip_hooks: (optional) Whether commit hooks should be
        skipped; default is True
    """

    # Create the repo and remote objects
    repo, _ = _get_repo_and_remote(lib_path, remote_name)

    # Add all the files and commit them
    index_file = git.index.base.IndexFile(repo)
    index_file.add("*")
    index_file.commit(message, skip_hooks=skip_hooks)


def sync_commit_push(
    message: str,
    *,
    remote_name: str = "origin",
    branch_name: str = "main",
    skip_hooks: bool = True
):
    """
    Decorator for automatically fetching, pulling, and pushing changes
    for a library function

    :param str message: The commit message
    :param str remote_name: (optional) The name of the remote, default
        is ``origin``
    :param str branch_name: (optional) The name of the branch, default
        is ``main``
    :param bool skip_hooks: (optional) Whether to skip the commit hooks,
        default is ``True``
    """

    def decorator_sync_commit_push(func):
        @functools.wraps(func)
        def wrapper_sync_commit_push(lib_path: StrPath, *args, **kwargs) -> Any:
            # Fetch and pull to repo
            sync_and_checkout(lib_path, remote_name, branch_name)

            # Run function
            result = func(lib_path, *args, **kwargs)

            # Commit and push changes
            commit_changes(lib_path, message, remote_name, skip_hooks)
            push_changes(lib_path, remote_name)

            # Return the function result(s)
            return result

        return wrapper_sync_commit_push

    return decorator_sync_commit_push

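The sync_commit_push decorator above is meant to wrap a local library patch function so the repo is synced before the patch runs and the result is committed and pushed afterwards. A minimal usage sketch; the patch function, file name, and library path below are hypothetical examples, not adabot functionality:

    # Hypothetical patch function wrapped by the decorator defined above
    import os

    from git_functionality import sync_commit_push
    from library_functions import StrPath


    @sync_commit_push("Normalize README line endings")
    def normalize_readme(lib_path: StrPath) -> int:
        """Hypothetical patch: rewrite README.rst with Unix line endings."""
        readme = os.path.join(lib_path, "README.rst")
        with open(readme, "r", newline="") as fp:
            content = fp.read()
        with open(readme, "w", newline="\n") as fp:
            fp.write(content)
        return len(content)


    # Calling the wrapped function fetches and checks out main, runs the patch,
    # then commits (skipping hooks by default) and pushes the result:
    # normalize_readme("/path/to/bundle/libraries/drivers/some_library")
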
tools/iterate_libraries.py (new file, 191 lines)
@@ -0,0 +1,191 @@
# SPDX-FileCopyrightText: 2022 Alec Delaney
#
# SPDX-License-Identifier: MIT

"""

iterate_libraries.py
====================

Functionality for iterating through a cloned Adafruit CircuitPython
Bundle to run functions on each library

* Author(s): Alec Delaney

"""

import os
import glob
import pathlib
from collections.abc import Sequence, Iterable
from typing import TypeVar, Any, Union, List
from typing_extensions import TypeAlias
import parse
from github import Github
from github.Repository import Repository
from github.ContentFile import ContentFile
from library_functions import StrPath, LocalLibFunc, RemoteLibFunc

# Helpful generic type alias definitions for type annotations

PosArg = TypeVar("PosArg")
KeyArg = TypeVar("KeyArg")
RetArg = TypeVar("RetArg")

LocalLibFunc_IterInstruction: TypeAlias = tuple[
    LocalLibFunc, Sequence[PosArg], dict[str, KeyArg]
]
"""Instruction set as a tuple of a function to run on a local library,
a list of the positional arguments to be provided to it, and a
dictionary of keyword arguments to be provided to it. You do not need
to include the library path as an argument, as it is automatically
supplied."""

LocalLibFunc_IterResult: TypeAlias = tuple[StrPath, list[RetArg]]
"""Result of function(s) run on a library as a tuple of the path to
the local library modified and a list of the result(s) of the
function(s)"""

RemoteLibFunc_IterInstruction: TypeAlias = tuple[
    RemoteLibFunc, Sequence[PosArg], dict[str, KeyArg]
]
"""Instruction set as a tuple of a function to run on a remote library,
a list of the positional arguments to be provided to it, and a
dictionary of keyword arguments to be provided to it. You do not need
to include the Repository object as an argument, as it is automatically
supplied."""

RemoteLibFunc_IterResult: TypeAlias = tuple[Repository, list[RetArg]]
"""Result of function(s) run on a library as a tuple of the name of
the remote library modified and a list of the result(s) of the
function(s)"""


# Global Variables

_BUNDLE_BRANCHES = ("drivers", "helpers")


def perform_func(
    item: Any,
    func_workflow: Union[RemoteLibFunc_IterInstruction, LocalLibFunc_IterInstruction],
) -> Union[List[RemoteLibFunc_IterResult], List[LocalLibFunc_IterResult]]:
    """
    Perform the given function(s) on the given library
    """
    func_results = []
    for func, args, kwargs in func_workflow:
        result = func(item, *args, **kwargs)
        func_results.append(result)
    return func_results


def iter_local_bundle_with_func(
    bundle_path: StrPath,
    func_workflow: Iterable[LocalLibFunc_IterInstruction],
    *,
    local_folder: str = "",
) -> list[LocalLibFunc_IterResult]:
    """Iterate through the libraries and run a given function with the
    provided arguments

    :param StrPath bundle_path: The path to the cloned bundle
    :param Iterable func_workflow: An iterable of tuples containing pairs
        of functions and corresponding arguments; the path to each specific
        library is automatically provided to the functions, so the functions
        must account for it
    :return: A list containing tuples of pairs of each library path and a list
        with the results from each function
    :rtype: list
    """

    # Initialize list of results
    results = []

    # Keep track of all libraries iterated
    iterated = set()

    # Loop through each bundle branch
    for branch_name in _BUNDLE_BRANCHES:
        libraries_glob_path = os.path.join(bundle_path, "libraries", branch_name, "*")
        libraries_path_list = glob.glob(libraries_glob_path)

        # Enter each library in the bundle
        for library_path in libraries_path_list:
            iterated.add(os.path.split(library_path)[1].lower())
            func_results = perform_func(library_path, func_workflow)

            results.append((library_path, func_results))

    if local_folder:
        additional = {
            os.path.split(pathname)[1].lower()
            for pathname in glob.glob(os.path.join(local_folder, "*"))
        }
        diff = additional.difference(iterated)
        for unused in diff:
            unused_func_results = perform_func(unused, func_workflow)
            results.append((unused, unused_func_results))

    return results


# pylint: disable=too-many-locals
def iter_remote_bundle_with_func(
    gh_token: str,
    func_workflow: Iterable[RemoteLibFunc_IterInstruction],
    *,
    local_folder: str = "",
) -> list[RemoteLibFunc_IterResult]:
    """Iterate through the remote bundle, accessing each library's git repo
    using the GitHub RESTful API (specifically using ``PyGithub``)

    :param str gh_token: A GitHub token with proper scopes
    :param Iterable func_workflow: An iterable of tuples containing pairs
        of functions and corresponding arguments; the Repository object for
        each specific library is automatically provided to the functions, so
        the functions must account for it
    :return: A list containing tuples of pairs of each Repository object and
        a list with the results from each function
    :rtype: list
    """

    # Get the Github repo object
    github_client = Github(gh_token)
    bundle_repo = github_client.get_repo("adafruit/Adafruit_CircuitPython_Bundle")

    # Initialize list of results
    results = []

    # Keep track of all libraries iterated
    iterated = set()

    # Loop through each bundle branch
    for branch_name in _BUNDLE_BRANCHES:
        branch_repos_path = "/".join(("libraries", branch_name))
        branch_repos: list[ContentFile] = bundle_repo.get_contents(branch_repos_path)

        # Enter each library in the bundle
        for repo_file in branch_repos:
            repo_name_result: parse.Result = parse.search(
                "repos/adafruit/{repo_name:w}/", repo_file.git_url
            )
            repo_name: str = repo_name_result.named["repo_name"]

            repo = github_client.get_repo(f"adafruit/{repo_name}")
            iterated.add(repo_name.lower())

            func_results = perform_func(repo, func_workflow)
            results.append((repo, func_results))

    if local_folder:
        additional = {
            path.name.lower() for path in pathlib.Path(local_folder).glob("*")
        }
        diff = additional.difference(iterated)
        for unused in diff:
            unused_repo = github_client.get_repo(f"adafruit/{unused}")
            unused_func_results = perform_func(unused_repo, func_workflow)
            results.append((unused_repo, unused_func_results))

    return results

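As the instruction-tuple docstrings above describe, a workflow is an iterable of (function, positional args, keyword args) tuples; the library path or Repository object is prepended automatically when each function is called. A minimal local-bundle sketch, assuming a hypothetical check function and bundle path (neither is part of adabot):

    # Hypothetical func_workflow driven through the local iterator above
    import os

    from iterate_libraries import iter_local_bundle_with_func
    from library_functions import StrPath, in_lib_path


    @in_lib_path
    def has_pyproject(lib_path: StrPath, *, verbose: bool = False) -> bool:
        """Hypothetical check: does this library ship a pyproject.toml?"""
        found = os.path.exists("pyproject.toml")
        if verbose:
            print(f"{lib_path}: {found}")
        return found


    workflow = [(has_pyproject, (), {"verbose": True})]
    for lib_path, func_results in iter_local_bundle_with_func(
        "/path/to/Adafruit_CircuitPython_Bundle", workflow
    ):
        print(lib_path, func_results[0])
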
tools/library_functions.py (new file, 80 lines)
@@ -0,0 +1,80 @@
# SPDX-FileCopyrightText: 2022 Alec Delaney
#
# SPDX-License-Identifier: MIT

"""

library_functions.py
====================

Library-specific functionalities to aid in developing patches

* Author(s): Alec Delaney

"""

import os
import functools
from collections.abc import Sequence
from typing import Protocol, Any, Union
from typing_extensions import TypeAlias
from github.Repository import Repository

# Helpful type annotation for path-like strings
StrPath: TypeAlias = Union[str, os.PathLike[str]]
"""Path or path-like strings"""


# pylint: disable=too-few-public-methods
class LocalLibFunc(Protocol):
    """Typing protocol for methods (or callables) that take the following
    parameters:

    - (StrPath) The path to a specific Adafruit library
    - (Sequence[Any]) A list of any positional arguments
    - (Dict[str, Any]) A dict of any keyword arguments
    """

    def __call__(
        self, lib_path: StrPath, *args: Sequence[Any], **kwargs: dict[str, Any]
    ) -> Any:
        ...


# pylint: disable=too-few-public-methods
class RemoteLibFunc(Protocol):
    """Typing protocol for methods (or callables) that take the following
    parameters:

    - (Repository) The repo as a github.Repository.Repository object
    - (Sequence[Any]) A list of any positional arguments
    - (Dict[str, Any]) A dict of any keyword arguments
    """

    def __call__(
        self, lib_repo: Repository, *args: Sequence[Any], **kwargs: dict[str, Any]
    ) -> Any:
        ...


def in_lib_path(func: LocalLibFunc) -> LocalLibFunc:
    """Decorator for temporarily entering a library's directory while the
    decorated function runs

    :param LocalLibFunc func: The library function to decorate
    """

    @functools.wraps(func)
    def wrapper_use_lib_path(lib_path: StrPath, *args, **kwargs) -> Any:
        # Get the current directory
        current_path = os.getcwd()

        # Enter the library directory for the duration of executing the function
        os.chdir(lib_path)
        result = func(lib_path, *args, **kwargs)
        os.chdir(current_path)

        # Return the result of the function
        return result

    return wrapper_use_lib_path

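The two protocols above describe the call signature expected by the iterators in iterate_libraries.py. A minimal sketch of a callable satisfying RemoteLibFunc; the star-count check and threshold are illustrative assumptions, not adabot behavior:

    # Hypothetical remote-library function matching the RemoteLibFunc protocol
    from github.Repository import Repository


    def is_popular(lib_repo: Repository, threshold: int = 50, *, debug: bool = False) -> bool:
        """Hypothetical check: does the repo have at least `threshold` stars?"""
        count = lib_repo.stargazers_count
        if debug:
            print(f"{lib_repo.full_name}: {count} stars")
        return count >= threshold


    # It can then be scheduled through the remote iterator, e.g.:
    # iter_remote_bundle_with_func(gh_token, [(is_popular, (25,), {"debug": True})])
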
tools/run_black.sh (new file, 17 lines)
@@ -0,0 +1,17 @@
# SPDX-FileCopyrightText: 2023 Alec Delaney
#
# SPDX-License-Identifier: MIT

rm -rf .gitlibs
mkdir .gitlibs
cd .libraries
for repo in *; do
    cd ../.gitlibs
    git clone https://github.com/adafruit/$repo.git
    cd $repo
    pre-commit run --all-files
    git add -A
    git commit -m "Run pre-commit"
    git push
    cd ..
done

tools/runner.py (new file, 98 lines)
@@ -0,0 +1,98 @@
# SPDX-FileCopyrightText: 2022 Eva Herrada
#
# SPDX-License-Identifier: MIT

"""
Tool for running specific CircuitPython library validators one at a time.

IMPORTANT: Must be run from the top-level adabot directory (one directory up
from this one)

Type `python3 runner.py` to run this file, and select the validator you want
to run
"""

import datetime
import inspect
import json

from adabot import pypi_requests as pypi
from adabot.lib import circuitpython_library_validators as cpy_vals
from adabot.lib import common_funcs
from adabot.lib.common_funcs import list_repos

default_validators = [
    vals[1]
    for vals in inspect.getmembers(cpy_vals.LibraryValidator)
    if vals[0].startswith("validate")
]

bundle_submodules = common_funcs.get_bundle_submodules()

LATEST_PYLINT = ""
pylint_info = pypi.get("/pypi/pylint/json")
if pylint_info and pylint_info.ok:
    LATEST_PYLINT = pylint_info.json()["info"]["version"]

validator = cpy_vals.LibraryValidator(
    default_validators,
    bundle_submodules,
    LATEST_PYLINT,
)

valids = {}
for count, val in enumerate(default_validators):
    t = str(val).split(" at", maxsplit=1)[0].split("Validator.", maxsplit=1)[1]
    valids[count] = t
    print(f"{count}:", t)

select = valids[
    int(input(f"Select a function to run [0-{len(default_validators)-1}]: "))
]
print(select)
selected = getattr(validator, select)
print(selected)

try:
    with open("repos.json", "r") as f:
        DATE = f.readline().rstrip()
except FileNotFoundError:
    DATE = ""

print(f"Last run: {DATE}")
if DATE != str(datetime.date.today()):
    with open("repos.json", "w") as f:
        print("Fetching Repos List")
        all_repos = list_repos()
        print("Got Repos List")
        f.write(str(datetime.date.today()) + "\n")
        f.write(json.dumps(all_repos))

with open("repos.json", "r") as f:
    all_repos = json.loads(f.read().split("\n")[1])

results = {}

for repo in all_repos:
    val = selected(repo)
    print(repo["name"])
    print(val)
    if len(val):
        if isinstance(val[0], tuple):
            if val[0][0] not in results:
                results[val[0][0]] = []
            results[val[0][0]].append(repo["name"])
        else:
            for i in val:
                if i not in results:
                    results[i] = []
                results[i].append(repo["name"])


print(results)
with open("adabot_run.txt", "w") as f:
    for k, v in results.items():
        f.write(k + "\n")
        for i in v:
            f.write(i + "\n")
        f.write("\n")