Compare commits
7 commits
bb920f9474 ... 824d6273a6
Author | SHA1 | Date
---|---|---
ItsDrike | 824d6273a6 |
ItsDrike | 06cbefb631 |
ItsDrike | 703fd330eb |
ItsDrike | bd9af98819 |
ItsDrike | 8b78208f58 |
ItsDrike | aee9ecba03 |
ItsDrike | 7e4c0525c3 |

.github/CODEOWNERS (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
# If a PR changes a file that has a code owner specified, this code owner
# is automatically requested a review from

# GitHub CI
.github/dependabot.yml @ItsDrike
.github/workflows/** @ItsDrike
.github/scripts/** @ItsDrike
.codeclimate.yml

# Meta (config files for the repo itself)
.github/CODEOWNERS @ItsDrike
.github/ISSUE_TEMPLATE/** @ItsDrike
.github/pull_request_template.md @ItsDrike

# Project's README/documents
README.md @ItsDrike
CODE-OF-CONDUCT.md @ItsDrike
CONTRIBUTING.md @ItsDrike
ATTRIBUTION.md @ItsDrike
LICENSE.txt @ItsDrike
LICENSE-THIRD-PARTY.txt @ItsDrike
SECURITY.md @ItsDrike

.github/ISSUE_TEMPLATES/bug_report.yml (vendored, new file, 75 lines)
@@ -0,0 +1,75 @@
name: Bug report
description: Found a bug? Let us know so we can fix it!
labels: ["type: bug"]

body:
  - type: textarea
    id: reproduction-steps
    attributes:
      label: Steps to reproduce
      description: Steps to reproduce the bug. This can also be a code snippet.
      value: |
        1.
        2.
        3.
        4.
    validations:
      required: true

  - type: textarea
    id: expected-result
    attributes:
      label: Expected result
      description: What should have happened if the bug wasn't there?
    validations:
      required: true

  - type: textarea
    id: actual-result
    attributes:
      label: Actual result
      description: What happened exactly? If you have a traceback, please provide all of it.
    validations:
      required: true

  - type: input
    id: library-version
    attributes:
      label: Library version
      description: mcproto version used when this bug was encountered. (Find out with `pip show mcproto` command)
      placeholder: 0.1.0
    validations:
      required: true

  - type: input
    id: python-version
    attributes:
      label: Python version
      description: Version of python interpreter you're using. (Find out with `python -V` or `py -V`)
      placeholder: 3.11.1
    validations:
      required: true

  - type: input
    id: operating-system
    attributes:
      label: Operating system
      description: Operating system used when this bug was encountered.
      placeholder: Windows 11 / Linux - Ubuntu 22.10 / MacOS / ...

  - type: textarea
    id: further-info
    attributes:
      label: Further info
      description: Any further info or images go here.

  - type: checkboxes
    id: checklist
    attributes:
      label: Checklist
      description: Make sure to tick all the following boxes.
      options:
        - label: I have searched the issue tracker and have made sure it's not a duplicate. If it is a follow up of another issue, I have specified it.
          required: true
        - label: I have made sure to remove ANY sensitive information (passwords, credentials, personal details, etc.).
          required: true

.github/ISSUE_TEMPLATES/config.yml (vendored, new file, 1 line)
@@ -0,0 +1 @@
blank_issues_enabled: true

.github/ISSUE_TEMPLATES/feature_request.yml (vendored, new file, 36 lines)
@@ -0,0 +1,36 @@
name: Feature request
description: Got a cool idea you would like implemented? Share it with us!
labels: ["type: feature"]

body:
  - type: textarea
    id: summary
    attributes:
      label: Summary
      description: Small summary of the feature.
    validations:
      required: true

  - type: textarea
    id: problem
    attributes:
      label: Why is this needed?
      description: Why should this feature be implemented? What problem(s) would it solve?
    validations:
      required: true

  - type: textarea
    id: ideal-implementation
    attributes:
      label: Ideal implementation
      description: How should this feature be implemented?
      value: To be decided.

  - type: checkboxes
    id: checklist
    attributes:
      label: Checklist
      description: Make sure to tick all the following boxes.
      options:
        - label: I have searched the issue tracker and have made sure it's not a duplicate. If it is a follow up of another issue, I have specified it.
          required: true

.github/dependabot.yml (vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/" # Location of package manifests
    schedule:
      interval: "daily"
    #labels:
    #  - "a: dependencies"
    #  - "p: 3 - low"
    #  - "t: enhancement"

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
    #labels:
    #  - "a: dependencies"
    #  - "a: CI"
    #  - "p: 3 - low"
    #  - "t: enhancement"

.github/scripts/normalize_coverage.py (vendored, new file, 11 lines)
@@ -0,0 +1,11 @@
from __future__ import annotations

import sqlite3

connection = sqlite3.connect(".coverage")

# Normalize windows paths
connection.execute("UPDATE file SET path = REPLACE(path, '\\', '/')")

connection.commit()
connection.close()
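
The script above rewrites path separators directly in the `.coverage` SQLite data file, so that coverage collected on Windows runners can later be combined with Linux runs. As a quick sanity check, the same `file` table the script relies on can be read back afterwards; this is only a sketch, since that table layout is an internal detail of coverage.py:

```python
from __future__ import annotations

import sqlite3

# Read back the paths stored in the ".coverage" SQLite database to confirm
# that no Windows-style separators remain after normalize_coverage.py ran.
connection = sqlite3.connect(".coverage")
try:
    for (path,) in connection.execute("SELECT path FROM file ORDER BY path"):
        assert "\\" not in path, f"unnormalized path: {path}"
        print(path)
finally:
    connection.close()
```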

.github/workflows/dependabot-auto-merge.yml (vendored, new file, 37 lines)
@@ -0,0 +1,37 @@
---
name: Dependabot auto-merge
on: pull_request_target

permissions:
  contents: write
  pull-requests: write

jobs:
  dependabot:
    runs-on: ubuntu-latest
    if: github.actor == 'dependabot[bot]'
    steps:
      - name: Generate token
        id: app-token
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.APP_ID }}
          private-key: ${{ secrets.PRIVATE_KEY }}

      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v2
        with:
          github-token: "${{ steps.app-token.outputs.token }}"

      - name: Approve a PR
        run: gh pr review --approve "$PR_URL"
        env:
          PR_URL: ${{ github.event.pull_request.html_url }}
          GITHUB_TOKEN: ${{ steps.app-token.outputs.token }}

      - name: Enable auto-merge for Dependabot PRs
        run: gh pr merge --auto --squash "$PR_URL"
        env:
          PR_URL: ${{ github.event.pull_request.html_url }}
          GITHUB_TOKEN: ${{ steps.app-token.outputs.token }}

.github/workflows/main.yml (vendored, new file, 43 lines)
@@ -0,0 +1,43 @@
---
name: CI

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

# Cancel already running workflows if new ones are scheduled
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  validation:
    uses: ./.github/workflows/validation.yml

  unit-tests:
    uses: ./.github/workflows/unit-tests.yml

  # Produce a pull request payload artifact with various data about the
  # pull-request event (such as the PR number, title, author, ...).
  # This data is then picked up by the status_embed.yml action.
  pr_artifact:
    name: Produce Pull Request payload artifact
    runs-on: ubuntu-latest

    steps:
      - name: Prepare Pull Request Payload artifact
        id: prepare-artifact
        if: always() && github.event_name == 'pull_request'
        continue-on-error: true
        run: cat $GITHUB_EVENT_PATH | jq '.pull_request' > pull_request_payload.json

      - name: Upload a Build Artifact
        if: always() && steps.prepare-artifact.outcome == 'success'
        continue-on-error: true
        uses: actions/upload-artifact@v4
        with:
          name: pull-request-payload
          path: pull_request_payload.json
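
The `pr_artifact` job's run step is just `jq '.pull_request'` applied to the webhook event payload that GitHub writes to `$GITHUB_EVENT_PATH`. For poking at a saved payload locally, a rough Python equivalent of that single step might look like this (the fallback file name `event.json` is only an example):

```python
import json
import os

# GITHUB_EVENT_PATH holds the path to the triggering event's JSON payload on a
# runner; outside of Actions, point it at any saved pull_request event payload.
event_path = os.environ.get("GITHUB_EVENT_PATH", "event.json")

with open(event_path, encoding="utf-8") as f:
    event = json.load(f)

# Equivalent of: jq '.pull_request' "$GITHUB_EVENT_PATH" > pull_request_payload.json
with open("pull_request_payload.json", "w", encoding="utf-8") as f:
    json.dump(event.get("pull_request"), f, indent=2)
```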

.github/workflows/status_embed.yml (vendored, new file, 64 lines)
@@ -0,0 +1,64 @@
---
name: Status Embed

on:
  workflow_run:
    workflows:
      - CI
    types:
      - completed

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  status_embed:
    name: Send Status Embed to Discord
    runs-on: ubuntu-latest

    steps:
      # A workflow_run event does not contain all the information
      # we need for a PR embed. That's why we upload an artifact
      # with that information in the CI workflow.
      - name: Get Pull Request Information
        id: pr_info
        if: github.event.workflow_run.event == 'pull_request'
        run: |
          curl -s -H "Authorization: token $GITHUB_TOKEN" ${{ github.event.workflow_run.artifacts_url }} > artifacts.json
          DOWNLOAD_URL=$(cat artifacts.json | jq -r '.artifacts[] | select(.name == "pull-request-payload") | .archive_download_url')
          [ -z "$DOWNLOAD_URL" ] && exit 1
          curl -sSL -H "Authorization: token $GITHUB_TOKEN" -o pull_request_payload.zip $DOWNLOAD_URL || exit 2
          unzip -p pull_request_payload.zip > pull_request_payload.json
          [ -s pull_request_payload.json ] || exit 3
          echo "pr_author_login=$(jq -r '.user.login // empty' pull_request_payload.json)" >> $GITHUB_OUTPUT
          echo "pr_number=$(jq -r '.number // empty' pull_request_payload.json)" >> $GITHUB_OUTPUT
          echo "pr_title=$(jq -r '.title // empty' pull_request_payload.json)" >> $GITHUB_OUTPUT
          echo "pr_source=$(jq -r '.head.label // empty' pull_request_payload.json)" >> $GITHUB_OUTPUT
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      # Send an informational status embed to Discord instead of the
      # standard embeds that Discord sends. This embed will contain
      # more information and we can fine tune when we actually want
      # to send an embed.
      - name: GitHub Actions Status Embed for Discord
        uses: SebastiaanZ/github-status-embed-for-discord@v0.3.0
        with:
          # Our GitHub Actions webhook
          webhook_id: "1051784242318815242"
          webhook_token: ${{ secrets.webhook_token }}

          # We need to provide the information of the workflow that
          # triggered this workflow instead of this workflow.
          workflow_name: ${{ github.event.workflow_run.name }}
          run_id: ${{ github.event.workflow_run.id }}
          run_number: ${{ github.event.workflow_run.run_number }}
          status: ${{ github.event.workflow_run.conclusion }}
          sha: ${{ github.event.workflow_run.head_sha }}

          # Now we can use the information extracted in the previous step:
          pr_author_login: ${{ steps.pr_info.outputs.pr_author_login }}
          pr_number: ${{ steps.pr_info.outputs.pr_number }}
          pr_title: ${{ steps.pr_info.outputs.pr_title }}
          pr_source: ${{ steps.pr_info.outputs.pr_source }}
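
The `pr_info` step reads four fields out of the downloaded `pull_request_payload.json` with `jq` and exposes them as step outputs. A minimal Python sketch of the same extraction, using the same JSON keys the jq expressions above reference:

```python
import json

# Load the artifact produced by the CI workflow's pr_artifact job.
with open("pull_request_payload.json", encoding="utf-8") as f:
    pr = json.load(f)

# Mirror the four jq expressions from the workflow step.
outputs = {
    "pr_author_login": (pr.get("user") or {}).get("login", ""),
    "pr_number": pr.get("number", ""),
    "pr_title": pr.get("title", ""),
    "pr_source": (pr.get("head") or {}).get("label", ""),
}

for key, value in outputs.items():
    print(f"{key}={value}")
```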

.github/workflows/unit-tests.yml (vendored, new file, 93 lines)
@@ -0,0 +1,93 @@
---
name: Unit-Tests

on: workflow_call

jobs:
  unit-tests:
    runs-on: ${{ matrix.platform }}

    strategy:
      fail-fast: false # Allows for matrix sub-jobs to fail without cancelling the rest
      matrix:
        platform: [ubuntu-latest, windows-latest]
        python-version: ["3.8", "3.12"]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup poetry
        id: poetry_setup
        uses: ItsDrike/setup-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
          install-args: "--without lint --without release"

      - name: Run pytest
        shell: bash
        run: |
          poetry run task test

          python .github/scripts/normalize_coverage.py
          mv .coverage .coverage.${{ matrix.platform }}.${{ matrix.python-version }}

      - name: Upload coverage artifact
        uses: actions/upload-artifact@v4
        with:
          name: coverage.${{ matrix.platform }}.${{ matrix.python-version }}
          path: .coverage.${{ matrix.platform }}.${{ matrix.python-version }}
          retention-days: 1
          if-no-files-found: error

  upload-coverage:
    needs: [unit-tests]
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup poetry
        id: poetry_setup
        uses: ItsDrike/setup-poetry@v1
        with:
          python-version: 3.12
          install-args: "--no-root --only test"

      - name: Download all coverage artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: coverage.*
          merge-multiple: true # support downloading multiple artifacts to same dir

      # Combine all of the coverage files (for each os, python version - from matrix)
      # into a single coverage file (.coverage), and produce a final (combined) coverage report.
      - name: Combine coverage
        run: |
          coverage combine
          coverage xml
          coverage report

      - name: Upload coverage to codeclimate
        uses: paambaati/codeclimate-action@v8.0.0
        env:
          CC_TEST_REPORTER_ID: 0ec6191ea237656410b90dded9352a5b16d68f8d86d60ea8944abd41d532e869
        with:
          coverageLocations: .coverage.xml:coverage.py

  tests-done:
    needs: [unit-tests]
    if: always() && !cancelled()
    runs-on: ubuntu-latest

    steps:
      - name: Set status based on required jobs
        env:
          RESULTS: ${{ join(needs.*.result, ' ') }}
        run: |
          for result in $RESULTS; do
            if [ "$result" != "success" ]; then
              exit 1
            fi
          done
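
The `upload-coverage` job renames each matrix run's data file to `.coverage.<platform>.<python-version>`, downloads them all into one directory, and merges them with `coverage combine` before producing the XML and terminal reports. The same merge can be reproduced locally through coverage.py's Python API; a minimal sketch, assuming the `.coverage.*` data files sit in the current directory:

```python
import coverage

# Merge all .coverage.* data files in the current directory into a single
# .coverage file, then produce the same reports as the workflow's CLI calls.
cov = coverage.Coverage()
cov.combine()      # coverage combine
cov.save()
cov.xml_report()   # coverage xml  -> writes coverage.xml
cov.report()       # coverage report -> prints a summary to stdout
```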

.github/workflows/validation.yml (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
---
name: Validation

on: workflow_call

env:
  PRE_COMMIT_HOME: "/home/runner/.cache/pre-commit"

jobs:
  lint:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup poetry
        id: poetry_setup
        uses: ItsDrike/setup-poetry@v1
        with:
          python-version: 3.12
          install-args: "--without release"

      - name: Pre-commit Environment Caching
        uses: actions/cache@v4
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key:
            "precommit-${{ runner.os }}-${{ steps.poetry_setup.outputs.python-version }}-\
            ${{ hashFiles('./.pre-commit-config.yaml') }}"
          # Restore keys allows us to perform a cache restore even if the full cache key wasn't matched.
          # That way we still end up saving new cache, but we can still make use of the cache from previous
          # version.
          restore-keys: "precommit-${{ runner.os }}-${{ steps.poetry_setup.outputs.python-version }}-"

      - name: Run pre-commit hooks
        run: SKIP=ruff-linter,ruff-formatter,slotscheck,basedpyright pre-commit run --all-files

      - name: Run ruff linter
        run: ruff check --output-format=github --show-fixes --exit-non-zero-on-fix .

      - name: Run ruff formatter
        run: ruff format --diff .

      - name: Run slotscheck
        run: slotscheck -m mcproto

      - name: Run basedpyright type checker
        run: basedpyright .

.pre-commit-config.yaml
@@ -35,10 +35,10 @@ repos:
 
   - repo: local
     hooks:
-      - id: pyright
-        name: Pyright
-        description: Run pyright type checker
-        entry: poetry run pyright
+      - id: basedpyright
+        name: BasedPyright
+        description: Run BasedPyright type checker
+        entry: poetry run basedpyright
         language: system
         types: [python]
         pass_filenames: false # pyright runs for the entire project, it can't run for single files

ATTRIBUTION.md (new empty file)

LICENSE-THIRD-PARTY.txt (new empty file)

poetry.lock (generated, 50 lines changed)
@@ -20,6 +20,20 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin
 test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
 trio = ["trio (>=0.23)"]
 
+[[package]]
+name = "basedpyright"
+version = "1.13.3"
+description = "static type checking for Python (but based)"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "basedpyright-1.13.3-py3-none-any.whl", hash = "sha256:3162c5a5f4fc99f9d53d76cbd8e24d31ad4b28b4fb26a58ab8be6e8b634c99a7"},
+    {file = "basedpyright-1.13.3.tar.gz", hash = "sha256:728d7098250db8d18bc4b48df8f93dfd9c79d155c3c99d41256a6caa6a21232e"},
+]
+
+[package.dependencies]
+nodejs-wheel-binaries = ">=20.13.1"
+
 [[package]]
 name = "certifi"
 version = "2024.6.2"

@@ -247,6 +261,22 @@ files = [
     {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
 ]
 
+[[package]]
+name = "nodejs-wheel-binaries"
+version = "20.15.1"
+description = "unoffical Node.js package"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-macosx_10_15_x86_64.whl", hash = "sha256:a04537555f59e53021f8a2b07fa7aaac29d7793b7fae7fbf561bf9a859f4c67a"},
+    {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:b5ff04efa56a3fcd1fd09b30f5236c12bd84c10fcb222f3c0e04e1d497342b70"},
+    {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c3e172e3fde3c13e7509312c81700736304dbd250745d87f00e7506065f3a5"},
+    {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9740f7456a43cb09521a1ac93a4355dc8282c41420f2d61ff631a01f39e2aa18"},
+    {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:bf5e239676efabb2fbaeff2f36d0bad8e2379f260ef44e13ef2151d037e40af3"},
+    {file = "nodejs_wheel_binaries-20.15.1-py2.py3-none-win_amd64.whl", hash = "sha256:624936171b1aa2e1cc6d1718b1caa089e943b54df16568fa2f4576d145ac279a"},
+    {file = "nodejs_wheel_binaries-20.15.1.tar.gz", hash = "sha256:b2f25b4f0e9a827ae1af8218ab13a385e279c236faf7b7c821e969bb8f6b25e8"},
+]
+
 [[package]]
 name = "packaging"
 version = "24.1"

@@ -336,24 +366,6 @@ nodeenv = ">=0.11.1"
 pyyaml = ">=5.1"
 virtualenv = ">=20.10.0"
 
-[[package]]
-name = "pyright"
-version = "1.1.369"
-description = "Command line wrapper for pyright"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pyright-1.1.369-py3-none-any.whl", hash = "sha256:06d5167a8d7be62523ced0265c5d2f1e022e110caf57a25d92f50fb2d07bcda0"},
-    {file = "pyright-1.1.369.tar.gz", hash = "sha256:ad290710072d021e213b98cc7a2f90ae3a48609ef5b978f749346d1a47eb9af8"},
-]
-
-[package.dependencies]
-nodeenv = ">=1.6.0"
-
-[package.extras]
-all = ["twine (>=3.4.1)"]
-dev = ["twine (>=3.4.1)"]
-
 [[package]]
 name = "pytest"
 version = "8.2.2"

@@ -570,4 +582,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "334a7105baa770bfd69d0391602995e317ccc0faa3d34539acbf7e9aa5355fee"
+content-hash = "0201fdc3d3cb37b96f743109c61ab1a08e353bbe168de410418208199ae1a345"

pyproject.toml
@@ -17,7 +17,7 @@ poethepoet = "^0.25.0"
 [tool.poetry.group.lint.dependencies]
 ruff = "^0.3.2"
 pre-commit = "^3.6.2"
-pyright = "^1.1.359"
+basedpyright = "^1.13.3"
 
 [tool.poetry.group.test.dependencies]
 pytest = "^8.1.1"

@@ -133,6 +133,7 @@ line-ending = "lf"
 
 [tool.pyright]
 pythonVersion = "3.11"
+typeCheckingMode = "standard"
 
 reportUntypedFunctionDecorator = "error"
 reportUntypedClassDecorator = "error"

@@ -202,6 +203,10 @@ help = "Run ruff formatter"
 cmd = "pytest -v --failed-first"
 help = "Run pytest tests"
 
+[tool.poe.tasks.pyright]
+cmd = "basedpyright ."
+help = "Run BasedPyright type-checker"
+
 [tool.poe.tasks.retest]
 cmd = "pytest -v --last-failed"
 help = "Run previously failed tests using pytest"

tests/src/__init__.py (new empty file)