mirror of
https://github.com/miaowware/qrm2.git
synced 2024-10-31 22:37:11 -04:00
Compare commits
60 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
4b55ab49b7 | ||
|
cf378a2ef4 | ||
|
13a8a63300 | ||
|
23619949d7 | ||
|
444687bd12 | ||
|
86da8d135a | ||
|
67add85a7a | ||
|
abdc5ebacb | ||
|
a5cbb5a09a | ||
|
ce99cc194e | ||
|
f8d7316071 | ||
|
9feeb01e42 | ||
|
fcb682ec4a | ||
|
c8a1128927 | ||
|
df08cefe25 | ||
|
cf93773a3c | ||
|
642b49041a | ||
|
e95f991300 | ||
|
56ae14a5c3 | ||
|
30c6e96883 | ||
|
44a6905f7b | ||
|
d7de78e582 | ||
|
b000c9173e | ||
|
5460dd811b | ||
|
a00d613430 | ||
|
6b0cdb6249 | ||
|
4eed94b55b | ||
|
3110961a3a | ||
|
a4c8a056ac | ||
|
9368ccd9e2 | ||
|
8efd958314 | ||
|
4803bf89b2 | ||
|
c82216cae6 | ||
|
1650cd50dc | ||
|
1b0b244f99 | ||
|
5db77f78d9 | ||
|
c7ea5e0998 | ||
|
adffd82127 | ||
|
970159e81b | ||
|
f5aeefc934 | ||
|
aac9262469 | ||
|
b472cdfa25 | ||
|
585cae8b97 | ||
|
c3fbd3e719 | ||
|
7eadb50b96 | ||
|
98642c099d | ||
|
ef6f01d1a3 | ||
|
91c5217d24 | ||
|
4659cf2a48 | ||
|
d33dad9f89 | ||
|
be083d2cc8 | ||
|
e2d1d1fc87 | ||
|
68eaeff476 | ||
|
f690ebb357 | ||
|
51e571b97d | ||
|
85ac05c337 | ||
|
718b2a7a80 | ||
|
0189db8792 | ||
|
80d6a989cc | ||
|
8f1782dcc0 |
12
.github/workflows/checks.yml
vendored
Normal file
12
.github/workflows/checks.yml
vendored
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
name: "Checks"
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
types: [opened, synchronize, reopened, ready_for_review, labeled, unlabeled]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
changelog:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: dangoslen/changelog-enforcer@v2
|
81
.github/workflows/docker.yml
vendored
81
.github/workflows/docker.yml
vendored
@ -3,6 +3,10 @@
|
|||||||
name: Docker Build and Deploy
|
name: Docker Build and Deploy
|
||||||
|
|
||||||
on:
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
push:
|
push:
|
||||||
# Publish `master` as Docker `dev` image.
|
# Publish `master` as Docker `dev` image.
|
||||||
branches:
|
branches:
|
||||||
@ -11,25 +15,42 @@ on:
|
|||||||
tags:
|
tags:
|
||||||
- v*
|
- v*
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
docker:
|
docker:
|
||||||
name: Build and push docker images
|
name: Build and push docker images
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
packages: write
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v2
|
uses: classabbyamp/treeless-checkout-action@v1
|
||||||
with:
|
|
||||||
ref: ${{ github.ref }}
|
|
||||||
|
|
||||||
- name: Write ref to file
|
- name: Write ref to file
|
||||||
|
if: ${{ github.event_name != 'pull_request' }}
|
||||||
run: git rev-list -n 1 $GITHUB_REF > ./git_commit
|
run: git rev-list -n 1 $GITHUB_REF > ./git_commit
|
||||||
|
|
||||||
- name: Build image
|
- name: Docker metadata
|
||||||
id: build_image
|
id: meta
|
||||||
run: |
|
uses: docker/metadata-action@v4
|
||||||
IMAGE_NAME=${GITHUB_REPOSITORY#*/}
|
with:
|
||||||
echo ::set-output name=image_name::$IMAGE_NAME
|
images: |
|
||||||
docker build . --file Dockerfile -t $IMAGE_NAME
|
ghcr.io/${{ github.repository }}
|
||||||
|
tags: |
|
||||||
|
type=sha,prefix=
|
||||||
|
type=raw,value=dev,enable={{is_default_branch}}
|
||||||
|
type=match,pattern=v(.*),group=1
|
||||||
|
labels: |
|
||||||
|
org.opencontainers.image.authors=classabbyamp and 0x5c
|
||||||
|
org.opencontainers.image.url=https://github.com/miaowware/qrm2
|
||||||
|
org.opencontainers.image.source=https://github.com/${{ github.repository }}
|
||||||
|
org.opencontainers.image.vendor=miaowware
|
||||||
|
org.opencontainers.image.title=qrm2
|
||||||
|
org.opencontainers.image.description=Discord bot with ham radio functions
|
||||||
|
org.opencontainers.image.licenses=LiLiQ-Rplus-1.1
|
||||||
|
|
||||||
- name: Login to Github Container Registry
|
- name: Login to Github Container Registry
|
||||||
uses: docker/login-action@v1
|
uses: docker/login-action@v1
|
||||||
@ -38,38 +59,10 @@ jobs:
|
|||||||
username: ${{ github.repository_owner }}
|
username: ${{ github.repository_owner }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Tag image
|
- name: Build and push
|
||||||
id: tag_image
|
uses: docker/build-push-action@v5
|
||||||
run: |
|
|
||||||
IMAGE_NAME=${{ steps.build_image.outputs.image_name }}
|
|
||||||
IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME
|
|
||||||
echo IMAGE_ID=$IMAGE_ID
|
|
||||||
echo ::set-output name=image_id::$IMAGE_ID
|
|
||||||
|
|
||||||
# Strip git ref prefix from version
|
|
||||||
VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
|
|
||||||
# Strip "v" prefix from tag name
|
|
||||||
[[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//')
|
|
||||||
# if version is master, set version to dev
|
|
||||||
[[ "$VERSION" == "master" ]] && VERSION=dev
|
|
||||||
echo VERSION=$VERSION
|
|
||||||
echo ::set-output name=version::$VERSION
|
|
||||||
|
|
||||||
# tag dev or x.x.x
|
|
||||||
docker tag $IMAGE_NAME $IMAGE_ID:$VERSION
|
|
||||||
# tag latest if not a dev release
|
|
||||||
[[ "$VERSION" != "dev" ]] && docker tag $IMAGE_NAME $IMAGE_ID:latest || true
|
|
||||||
|
|
||||||
- name: Push images to registry
|
|
||||||
run: |
|
|
||||||
[[ "${{ steps.tag_image.outputs.version }}" != "dev" ]] && docker push ${{ steps.tag_image.outputs.image_id }}:latest || true
|
|
||||||
docker push ${{ steps.tag_image.outputs.image_id }}:${{ steps.tag_image.outputs.version }}
|
|
||||||
|
|
||||||
- name: Deploy official images
|
|
||||||
id: deploy_images
|
|
||||||
uses: satak/webrequest-action@v1
|
|
||||||
with:
|
with:
|
||||||
url: ${{ secrets.DEPLOY_URL }}
|
context: .
|
||||||
method: POST
|
push: ${{ github.event_name != 'pull_request' }}
|
||||||
headers: '{"Authentication": "Token ${{ secrets.DEPLOY_TOKEN }}"}'
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
payload: '{"version": "${{ steps.tag_image.outputs.version }}"}'
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
40
.github/workflows/linting.yml
vendored
40
.github/workflows/linting.yml
vendored
@ -1,44 +1,22 @@
|
|||||||
name: Linting
|
name: Linting
|
||||||
|
|
||||||
on: [push,pull_request]
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
pull_request:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
precheck:
|
|
||||||
runs-on: ubuntu-20.04
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
should_skip: ${{ steps.skip_check.outputs.should_skip }}
|
|
||||||
steps:
|
|
||||||
- id: skip_check
|
|
||||||
uses: fkirc/skip-duplicate-actions@master
|
|
||||||
with:
|
|
||||||
# skip concurrent jobs if they are on the same thing
|
|
||||||
concurrent_skipping: 'same_content'
|
|
||||||
# never skip PR + manual/scheduled runs
|
|
||||||
do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]'
|
|
||||||
|
|
||||||
flake8:
|
flake8:
|
||||||
needs: precheck
|
runs-on: ubuntu-latest
|
||||||
if: ${{ needs.precheck.outputs.should_skip != 'true' }}
|
|
||||||
runs-on: ubuntu-20.04
|
|
||||||
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
python-version: [3.9]
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@master
|
- uses: actions/checkout@master
|
||||||
- name: Setup Python ${{ matrix.python-version }}
|
- name: Setup Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@v1
|
uses: actions/setup-python@v3
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: "3.9"
|
||||||
architecture: x64
|
architecture: x64
|
||||||
- name: Install flake8
|
- name: Install flake8
|
||||||
run: pip install flake8
|
run: pip install flake8
|
||||||
- name: Run flake8
|
- name: Run flake8
|
||||||
uses: suo/flake8-github-action@releases/v1
|
run: flake8 --format='::error title=flake8,file=%(path)s,line=%(row)d,col=%(col)d::[%(code)s] %(text)s'
|
||||||
with:
|
|
||||||
checkName: 'flake8' # NOTE: this needs to be the same as the job name
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
|
10
.github/workflows/release.yml
vendored
10
.github/workflows/release.yml
vendored
@ -12,6 +12,8 @@ jobs:
|
|||||||
release:
|
release:
|
||||||
name: Create Release
|
name: Create Release
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v2
|
||||||
@ -46,12 +48,10 @@ jobs:
|
|||||||
|
|
||||||
- name: Publish Release
|
- name: Publish Release
|
||||||
id: create_release
|
id: create_release
|
||||||
uses: actions/create-release@v1
|
uses: ncipollo/release-action@v1
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
with:
|
||||||
tag_name: ${{ env.tag_version }}
|
tag: ${{ env.tag_version }}
|
||||||
release_name: ${{ env.tag_subject }}
|
name: ${{ env.tag_subject }}
|
||||||
body: |
|
body: |
|
||||||
${{ env.tag_body }}
|
${{ env.tag_body }}
|
||||||
|
|
||||||
|
49
CHANGELOG.md
49
CHANGELOG.md
@ -7,6 +7,47 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||||||
## [Unreleased]
|
## [Unreleased]
|
||||||
|
|
||||||
|
|
||||||
|
## [2.9.2] - 2023-12-15
|
||||||
|
### Added
|
||||||
|
- `?drapmap` command to display NOAA D Region Absorption Predictions map.
|
||||||
|
- Support for the new username format.
|
||||||
|
### Fixed
|
||||||
|
- Issue where `?solarweather` would not show a picture (#474).
|
||||||
|
- Issue where `?metar` and `?taf` failed to fetch data (#475).
|
||||||
|
|
||||||
|
|
||||||
|
## [2.9.1] - 2023-01-29
|
||||||
|
### Fixed
|
||||||
|
- Issue where embeds would not work for users without avatars (#467).
|
||||||
|
- Issue where embeds would show the wrong timezone.
|
||||||
|
- Several issues with `?call` caused by issues in a library (#466).
|
||||||
|
|
||||||
|
|
||||||
|
## [2.9.0] - 2023-01-13
|
||||||
|
### Changed
|
||||||
|
- Migrated to Pycord.
|
||||||
|
### Removed
|
||||||
|
- Long-deprecated aliases for `?solarweather`.
|
||||||
|
### Fixed
|
||||||
|
- Issue where ?hamstudy would not work in direct messages (#442).
|
||||||
|
- Issue where `?solarweather` would not show a picture (#461).
|
||||||
|
|
||||||
|
|
||||||
|
## [2.8.0] - 2022-06-24
|
||||||
|
### Removed
|
||||||
|
- `?ae7q` command (#448).
|
||||||
|
|
||||||
|
|
||||||
|
## [2.7.6] - 2022-06-13
|
||||||
|
### Fixed
|
||||||
|
- Issue where `?muf` and `?fof2` would fail with an aiohttp error.
|
||||||
|
|
||||||
|
|
||||||
|
## [2.7.5] - 2022-06-08
|
||||||
|
### Changed
|
||||||
|
- Bumped ctyparser to 2.2.1.
|
||||||
|
|
||||||
|
|
||||||
## [2.7.4] - 2021-10-07
|
## [2.7.4] - 2021-10-07
|
||||||
### Added
|
### Added
|
||||||
- a new way to support qrm's development.
|
- a new way to support qrm's development.
|
||||||
@ -214,7 +255,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||||||
## 1.0.0 - 2019-07-31 [YANKED]
|
## 1.0.0 - 2019-07-31 [YANKED]
|
||||||
|
|
||||||
|
|
||||||
[Unreleased]: https://github.com/miaowware/qrm2/compare/v2.7.4...HEAD
|
[Unreleased]: https://github.com/miaowware/qrm2/compare/v2.9.2...HEAD
|
||||||
|
[2.9.2]: https://github.com/miaowware/qrm2/releases/tag/v2.9.2
|
||||||
|
[2.9.1]: https://github.com/miaowware/qrm2/releases/tag/v2.9.1
|
||||||
|
[2.9.0]: https://github.com/miaowware/qrm2/releases/tag/v2.9.0
|
||||||
|
[2.8.0]: https://github.com/miaowware/qrm2/releases/tag/v2.8.0
|
||||||
|
[2.7.6]: https://github.com/miaowware/qrm2/releases/tag/v2.7.6
|
||||||
|
[2.7.5]: https://github.com/miaowware/qrm2/releases/tag/v2.7.5
|
||||||
[2.7.4]: https://github.com/miaowware/qrm2/releases/tag/v2.7.4
|
[2.7.4]: https://github.com/miaowware/qrm2/releases/tag/v2.7.4
|
||||||
[2.7.3]: https://github.com/miaowware/qrm2/releases/tag/v2.7.3
|
[2.7.3]: https://github.com/miaowware/qrm2/releases/tag/v2.7.3
|
||||||
[2.7.2]: https://github.com/miaowware/qrm2/releases/tag/v2.7.2
|
[2.7.2]: https://github.com/miaowware/qrm2/releases/tag/v2.7.2
|
||||||
|
29
Dockerfile
29
Dockerfile
@ -1,30 +1,31 @@
|
|||||||
FROM voidlinux/voidlinux
|
FROM ghcr.io/void-linux/void-musl-full
|
||||||
|
|
||||||
COPY . /app
|
COPY . /app
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
ENV PYTHON_BIN python3
|
ARG REPOSITORY=https://repo-fastly.voidlinux.org/current
|
||||||
|
ARG PKGS="cairo libjpeg-turbo"
|
||||||
|
ARG UID 1000
|
||||||
|
ARG GID 1000
|
||||||
|
|
||||||
RUN \
|
RUN \
|
||||||
echo "**** update packages ****" && \
|
echo "**** update system ****" && \
|
||||||
xbps-install -Suy && \
|
xbps-install -Suy xbps -R ${REPOSITORY} && \
|
||||||
|
xbps-install -uy -R ${REPOSITORY} && \
|
||||||
echo "**** install system packages ****" && \
|
echo "**** install system packages ****" && \
|
||||||
export runtime_deps='cairo libjpeg-turbo' && \
|
xbps-install -y -R ${REPOSITORY} ${PKGS} python3.11 && \
|
||||||
export runtime_pkgs="${runtime_deps} python3-pip python3" && \
|
|
||||||
xbps-install -y $runtime_pkgs && \
|
|
||||||
echo "**** install pip packages ****" && \
|
echo "**** install pip packages ****" && \
|
||||||
pip3 install -U pip setuptools wheel && \
|
python3.11 -m venv botenv && \
|
||||||
pip3 install -r requirements.txt && \
|
botenv/bin/pip install -U pip setuptools wheel && \
|
||||||
|
botenv/bin/pip install -r requirements.txt && \
|
||||||
echo "**** clean up ****" && \
|
echo "**** clean up ****" && \
|
||||||
rm -rf \
|
rm -rf \
|
||||||
/root/.cache \
|
/root/.cache \
|
||||||
/tmp/* \
|
/tmp/* \
|
||||||
/var/cache/xbps/*
|
/var/cache/xbps/*
|
||||||
|
|
||||||
ARG UID
|
ENV PYTHONUNBUFFERED 1
|
||||||
ENV UID=${UID:-1000}
|
|
||||||
ARG GID
|
|
||||||
ENV GID=${GID:-1000}
|
|
||||||
USER $UID:$GID
|
USER $UID:$GID
|
||||||
|
|
||||||
CMD ["/bin/sh", "run.sh", "--pass-errors", "--no-botenv"]
|
CMD ["/bin/sh", "run.sh", "--pass-errors"]
|
||||||
|
2
Makefile
2
Makefile
@ -12,7 +12,7 @@
|
|||||||
# Those are the defaults; they can be over-ridden if specified
|
# Those are the defaults; they can be over-ridden if specified
|
||||||
# at en environment level or as 'make' arguments.
|
# at en environment level or as 'make' arguments.
|
||||||
BOTENV ?= botenv
|
BOTENV ?= botenv
|
||||||
PYTHON_BIN ?= python3.9
|
PYTHON_BIN ?= python3.11
|
||||||
PIP_OUTPUT ?= -q
|
PIP_OUTPUT ?= -q
|
||||||
|
|
||||||
|
|
||||||
|
@ -23,14 +23,11 @@ This is the easiest method for running the bot without any modifications.
|
|||||||
version: '3'
|
version: '3'
|
||||||
services:
|
services:
|
||||||
qrm2:
|
qrm2:
|
||||||
image: "docker.pkg.github.com/miaowware/qrm2/qrm2:latest"
|
image: "ghcr.io/miaowware/qrm2:latest"
|
||||||
restart: on-failure
|
restart: on-failure
|
||||||
volumes:
|
volumes:
|
||||||
- "./data:/app/data:rw"
|
- "./data:/app/data:rw"
|
||||||
environment:
|
|
||||||
- PYTHONUNBUFFERED=1
|
|
||||||
```
|
```
|
||||||
*Note that Github's registry requires [a few extra steps](https://docs.github.com/en/packages/using-github-packages-with-your-projects-ecosystem/configuring-docker-for-use-with-github-packages) during the initial setup.*
|
|
||||||
|
|
||||||
3. Create a subdirectory named `data`.
|
3. Create a subdirectory named `data`.
|
||||||
|
|
||||||
@ -64,8 +61,6 @@ This is the easiest method to run the bot with modifications.
|
|||||||
restart: on-failure
|
restart: on-failure
|
||||||
volumes:
|
volumes:
|
||||||
- "./data:/app/data:rw"
|
- "./data:/app/data:rw"
|
||||||
environment:
|
|
||||||
- PYTHONUNBUFFERED=1
|
|
||||||
```
|
```
|
||||||
|
|
||||||
3. Create a subdirectory named `data`.
|
3. Create a subdirectory named `data`.
|
||||||
@ -112,4 +107,4 @@ This methods is not very nice to use.
|
|||||||
|
|
||||||
Where `[image]` is either of:
|
Where `[image]` is either of:
|
||||||
- `qrm2:local-latest` if you are building your own.
|
- `qrm2:local-latest` if you are building your own.
|
||||||
- `docker.pkg.github.com/miaowware/qrm2/qrm2:latest` if you want to use the prebuilt image.
|
- `ghcr.io/miaowware/qrm2:latest` if you want to use the prebuilt image.
|
||||||
|
@ -38,7 +38,7 @@ All issues and requests related to resources (including maps, band charts, data)
|
|||||||
|
|
||||||
## Copyright
|
## Copyright
|
||||||
|
|
||||||
Copyright (C) 2019-2021 classabbyamp, 0x5c
|
Copyright (C) 2019-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
This program is released under the terms of the *Québec Free and Open-Source Licence – Strong Reciprocity (LiLiQ-R+)*, version 1.1.
|
This program is released under the terms of the *Québec Free and Open-Source Licence – Strong Reciprocity (LiLiQ-R+)*, version 1.1.
|
||||||
See [`LICENCE`](LICENCE) for full license text (Français / English).
|
See [`LICENCE`](LICENCE) for full license text (Français / English).
|
||||||
|
26
common.py
26
common.py
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Common tools for the bot.
|
Common tools for the bot.
|
||||||
---
|
---
|
||||||
Copyright (C) 2019-2021 classabbyamp, 0x5c
|
Copyright (C) 2019-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
@ -12,16 +12,17 @@ import enum
|
|||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
import traceback
|
import traceback
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from types import SimpleNamespace
|
from types import SimpleNamespace
|
||||||
from typing import Union
|
from typing import Union
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
|
import httpx
|
||||||
|
|
||||||
import discord
|
import discord
|
||||||
import discord.ext.commands as commands
|
import discord.ext.commands as commands
|
||||||
from discord import Emoji, Reaction, PartialEmoji
|
from discord import Emoji, PartialEmoji
|
||||||
|
|
||||||
import data.options as opt
|
import data.options as opt
|
||||||
|
|
||||||
@ -125,12 +126,16 @@ class ImagesGroup(collections.abc.Mapping):
|
|||||||
|
|
||||||
class BotHTTPError(Exception):
|
class BotHTTPError(Exception):
|
||||||
"""Raised whan a requests fails (status != 200) in a command."""
|
"""Raised whan a requests fails (status != 200) in a command."""
|
||||||
def __init__(self, response: aiohttp.ClientResponse):
|
def __init__(self, response: aiohttp.ClientResponse | httpx.Response):
|
||||||
msg = f"Request failed: {response.status} {response.reason}"
|
if isinstance(response, aiohttp.ClientResponse):
|
||||||
super().__init__(msg)
|
|
||||||
self.response = response
|
|
||||||
self.status = response.status
|
self.status = response.status
|
||||||
self.reason = response.reason
|
self.reason = response.reason
|
||||||
|
else:
|
||||||
|
self.status = response.status_code
|
||||||
|
self.reason = response.reason_phrase
|
||||||
|
msg = f"Request failed: {self.status} {self.reason}"
|
||||||
|
super().__init__(msg)
|
||||||
|
self.response = response
|
||||||
|
|
||||||
|
|
||||||
# --- Converters ---
|
# --- Converters ---
|
||||||
@ -160,8 +165,9 @@ class GlobalChannelConverter(commands.IDConverter):
|
|||||||
|
|
||||||
def embed_factory(ctx: commands.Context) -> discord.Embed:
|
def embed_factory(ctx: commands.Context) -> discord.Embed:
|
||||||
"""Creates an embed with neutral colour and standard footer."""
|
"""Creates an embed with neutral colour and standard footer."""
|
||||||
embed = discord.Embed(timestamp=datetime.utcnow(), colour=colours.neutral)
|
embed = discord.Embed(timestamp=datetime.now(timezone.utc), colour=colours.neutral)
|
||||||
embed.set_footer(text=str(ctx.author), icon_url=str(ctx.author.avatar_url))
|
if ctx.author:
|
||||||
|
embed.set_footer(text=str(ctx.author), icon_url=str(ctx.author.display_avatar))
|
||||||
return embed
|
return embed
|
||||||
|
|
||||||
|
|
||||||
@ -178,7 +184,7 @@ def error_embed_factory(ctx: commands.Context, exception: Exception, debug_mode:
|
|||||||
return embed
|
return embed
|
||||||
|
|
||||||
|
|
||||||
async def add_react(msg: discord.Message, react: Union[Emoji, Reaction, PartialEmoji, str]):
|
async def add_react(msg: discord.Message, react: Union[Emoji, PartialEmoji, str]):
|
||||||
try:
|
try:
|
||||||
await msg.add_reaction(react)
|
await msg.add_reaction(react)
|
||||||
except discord.Forbidden:
|
except discord.Forbidden:
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
-r requirements.txt
|
-r requirements.txt
|
||||||
flake8
|
flake8
|
||||||
discord.py-stubs==1.7.3
|
mypy
|
||||||
|
429
exts/ae7q.py
429
exts/ae7q.py
@ -1,435 +1,28 @@
|
|||||||
"""
|
"""
|
||||||
ae7q extension for qrm
|
ae7q extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2019-2021 classabbyamp, 0x5c
|
Copyright (C) 2019-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
# Test callsigns:
|
|
||||||
# KN8U: active, restricted
|
|
||||||
# AB2EE: expired, restricted
|
|
||||||
# KE8FGB: assigned once, no restrictions
|
|
||||||
# KV4AAA: unassigned, no records
|
|
||||||
# KC4USA: reserved, no call history, *but* has application history
|
|
||||||
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
from bs4 import BeautifulSoup
|
|
||||||
|
|
||||||
import discord.ext.commands as commands
|
import discord.ext.commands as commands
|
||||||
|
|
||||||
import common as cmn
|
from common import embed_factory, colours
|
||||||
|
|
||||||
|
|
||||||
class AE7QCog(commands.Cog):
|
class AE7QCog(commands.Cog):
|
||||||
def __init__(self, bot: commands.Bot):
|
@commands.command(name="ae7q", aliases=["ae"], case_insensitive=True)
|
||||||
self.bot = bot
|
async def _ae7q_lookup(self, ctx: commands.Context, *, _):
|
||||||
self.session = aiohttp.ClientSession(connector=bot.qrm.connector)
|
"""Removed in v2.8.0"""
|
||||||
|
embed = embed_factory(ctx)
|
||||||
@commands.group(name="ae7q", aliases=["ae"], case_insensitive=True, category=cmn.Cats.LOOKUP)
|
embed.colour = colours.bad
|
||||||
async def _ae7q_lookup(self, ctx: commands.Context):
|
embed.title = "Command removed"
|
||||||
"""Looks up a callsign, FRN, or Licensee ID on [ae7q.com](http://ae7q.com/)."""
|
embed.description = ("This command was removed in v2.8.0.\n"
|
||||||
if ctx.invoked_subcommand is None:
|
"For context, see [this Github issue](https://github.com/miaowware/qrm2/issues/448)")
|
||||||
await ctx.send_help(ctx.command)
|
|
||||||
|
|
||||||
@_ae7q_lookup.command(name="call", aliases=["c"], category=cmn.Cats.LOOKUP)
|
|
||||||
async def _ae7q_call(self, ctx: commands.Context, callsign: str):
|
|
||||||
"""Looks up the history of a callsign on [ae7q.com](http://ae7q.com/)."""
|
|
||||||
with ctx.typing():
|
|
||||||
callsign = callsign.upper()
|
|
||||||
desc = ""
|
|
||||||
base_url = "http://ae7q.com/query/data/CallHistory.php?CALL="
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
|
|
||||||
if not callsign.isalnum():
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
embed.title = "AE7Q History for Callsign"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.description = "Not a valid callsign!"
|
|
||||||
await ctx.send(embed=embed)
|
await ctx.send(embed=embed)
|
||||||
return
|
|
||||||
|
|
||||||
async with self.session.get(base_url + callsign) as resp:
|
|
||||||
if resp.status != 200:
|
|
||||||
raise cmn.BotHTTPError(resp)
|
|
||||||
page = await resp.text()
|
|
||||||
|
|
||||||
soup = BeautifulSoup(page, features="html.parser")
|
|
||||||
tables = [[row for row in table.find_all("tr")] for table in soup.select("table.Database")]
|
|
||||||
|
|
||||||
table = tables[0]
|
|
||||||
|
|
||||||
# find the first table in the page, and use it to make a description
|
|
||||||
if len(table[0]) == 1:
|
|
||||||
for row in table:
|
|
||||||
desc += " ".join(row.getText().split())
|
|
||||||
desc += "\n"
|
|
||||||
desc = desc.replace(callsign, f"`{callsign}`")
|
|
||||||
table = tables[1]
|
|
||||||
|
|
||||||
table_headers = table[0].find_all("th")
|
|
||||||
first_header = "".join(table_headers[0].strings) if len(table_headers) > 0 else None
|
|
||||||
|
|
||||||
# catch if the wrong table was selected
|
|
||||||
if first_header is None or first_header != "Entity Name":
|
|
||||||
embed.title = f"AE7Q History for {callsign}"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.url = base_url + callsign
|
|
||||||
embed.description = desc
|
|
||||||
embed.description += f"\nNo records found for `{callsign}`"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
table = await process_table(table[1:])
|
|
||||||
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
embed.title = f"AE7Q History for {callsign}"
|
|
||||||
embed.colour = cmn.colours.good
|
|
||||||
embed.url = base_url + callsign
|
|
||||||
|
|
||||||
# add the first three rows of the table to the embed
|
|
||||||
for row in table[0:3]:
|
|
||||||
header = f"**{row[0]}** ({row[1]})" # **Name** (Applicant Type)
|
|
||||||
body = (f"Class: *{row[2]}*\n"
|
|
||||||
f"Region: *{row[3]}*\n"
|
|
||||||
f"Status: *{row[4]}*\n"
|
|
||||||
f"Granted: *{row[5]}*\n"
|
|
||||||
f"Effective: *{row[6]}*\n"
|
|
||||||
f"Cancelled: *{row[7]}*\n"
|
|
||||||
f"Expires: *{row[8]}*")
|
|
||||||
embed.add_field(name=header, value=body, inline=False)
|
|
||||||
|
|
||||||
if len(table) > 3:
|
|
||||||
desc += f"\nRecords 1 to 3 of {len(table)}. See ae7q.com for more..."
|
|
||||||
|
|
||||||
embed.description = desc
|
|
||||||
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
|
|
||||||
@_ae7q_lookup.command(name="trustee", aliases=["t"], category=cmn.Cats.LOOKUP)
|
|
||||||
async def _ae7q_trustee(self, ctx: commands.Context, callsign: str):
|
|
||||||
"""Looks up the licenses for which a licensee is trustee on [ae7q.com](http://ae7q.com/)."""
|
|
||||||
with ctx.typing():
|
|
||||||
callsign = callsign.upper()
|
|
||||||
desc = ""
|
|
||||||
base_url = "http://ae7q.com/query/data/CallHistory.php?CALL="
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
|
|
||||||
if not callsign.isalnum():
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
embed.title = "AE7Q Trustee History for Callsign"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.description = "Not a valid callsign!"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
async with self.session.get(base_url + callsign) as resp:
|
|
||||||
if resp.status != 200:
|
|
||||||
raise cmn.BotHTTPError(resp)
|
|
||||||
page = await resp.text()
|
|
||||||
|
|
||||||
soup = BeautifulSoup(page, features="html.parser")
|
|
||||||
tables = [[row for row in table.find_all("tr")] for table in soup.select("table.Database")]
|
|
||||||
|
|
||||||
try:
|
|
||||||
table = tables[2] if len(tables[0][0]) == 1 else tables[1]
|
|
||||||
except IndexError:
|
|
||||||
embed.title = f"AE7Q Trustee History for {callsign}"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.url = base_url + callsign
|
|
||||||
embed.description = desc
|
|
||||||
embed.description += f"\nNo records found for `{callsign}`"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
table_headers = table[0].find_all("th")
|
|
||||||
first_header = "".join(table_headers[0].strings) if len(table_headers) > 0 else None
|
|
||||||
|
|
||||||
# catch if the wrong table was selected
|
|
||||||
if first_header is None or not first_header.startswith("With"):
|
|
||||||
embed.title = f"AE7Q Trustee History for {callsign}"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.url = base_url + callsign
|
|
||||||
embed.description = desc
|
|
||||||
embed.description += f"\nNo records found for `{callsign}`"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
table = await process_table(table[2:])
|
|
||||||
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
embed.title = f"AE7Q Trustee History for {callsign}"
|
|
||||||
embed.colour = cmn.colours.good
|
|
||||||
embed.url = base_url + callsign
|
|
||||||
|
|
||||||
# add the first three rows of the table to the embed
|
|
||||||
for row in table[0:3]:
|
|
||||||
header = f"**{row[0]}** ({row[3]})" # **Name** (Applicant Type)
|
|
||||||
body = (f"Name: *{row[2]}*\n"
|
|
||||||
f"Region: *{row[1]}*\n"
|
|
||||||
f"Status: *{row[4]}*\n"
|
|
||||||
f"Granted: *{row[5]}*\n"
|
|
||||||
f"Effective: *{row[6]}*\n"
|
|
||||||
f"Cancelled: *{row[7]}*\n"
|
|
||||||
f"Expires: *{row[8]}*")
|
|
||||||
embed.add_field(name=header, value=body, inline=False)
|
|
||||||
|
|
||||||
if len(table) > 3:
|
|
||||||
desc += f"\nRecords 1 to 3 of {len(table)}. See ae7q.com for more..."
|
|
||||||
|
|
||||||
embed.description = desc
|
|
||||||
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
|
|
||||||
@_ae7q_lookup.command(name="applications", aliases=["a"], category=cmn.Cats.LOOKUP)
|
|
||||||
async def _ae7q_applications(self, ctx: commands.Context, callsign: str):
|
|
||||||
"""Looks up the application history for a callsign on [ae7q.com](http://ae7q.com/)."""
|
|
||||||
"""
|
|
||||||
with ctx.typing():
|
|
||||||
callsign = callsign.upper()
|
|
||||||
desc = ""
|
|
||||||
base_url = "http://ae7q.com/query/data/CallHistory.php?CALL="
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
|
|
||||||
if not callsign.isalnum():
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
embed.title = "AE7Q Application History for Callsign"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.description = "Not a valid callsign!"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
async with self.session.get(base_url + callsign) as resp:
|
|
||||||
if resp.status != 200:
|
|
||||||
raise cmn.BotHTTPError(resp)
|
|
||||||
page = await resp.text()
|
|
||||||
|
|
||||||
soup = BeautifulSoup(page, features="html.parser")
|
|
||||||
tables = [[row for row in table.find_all("tr")] for table in soup.select("table.Database")]
|
|
||||||
|
|
||||||
table = tables[0]
|
|
||||||
|
|
||||||
# find the first table in the page, and use it to make a description
|
|
||||||
if len(table[0]) == 1:
|
|
||||||
for row in table:
|
|
||||||
desc += " ".join(row.getText().split())
|
|
||||||
desc += "\n"
|
|
||||||
desc = desc.replace(callsign, f"`{callsign}`")
|
|
||||||
|
|
||||||
# select the last table to get applications
|
|
||||||
table = tables[-1]
|
|
||||||
|
|
||||||
table_headers = table[0].find_all("th")
|
|
||||||
first_header = "".join(table_headers[0].strings) if len(table_headers) > 0 else None
|
|
||||||
|
|
||||||
# catch if the wrong table was selected
|
|
||||||
if first_header is None or not first_header.startswith("Receipt"):
|
|
||||||
embed.title = f"AE7Q Application History for {callsign}"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.url = base_url + callsign
|
|
||||||
embed.description = desc
|
|
||||||
embed.description += f"\nNo records found for `{callsign}`"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
table = await process_table(table[1:])
|
|
||||||
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
embed.title = f"AE7Q Application History for {callsign}"
|
|
||||||
embed.colour = cmn.colours.good
|
|
||||||
embed.url = base_url + callsign
|
|
||||||
|
|
||||||
# add the first three rows of the table to the embed
|
|
||||||
for row in table[0:3]:
|
|
||||||
header = f"**{row[1]}** ({row[3]})" # **Name** (Callsign)
|
|
||||||
body = (f"Received: *{row[0]}*\n"
|
|
||||||
f"Region: *{row[2]}*\n"
|
|
||||||
f"Purpose: *{row[5]}*\n"
|
|
||||||
f"Last Action: *{row[7]}*\n"
|
|
||||||
f"Application Status: *{row[8]}*\n")
|
|
||||||
embed.add_field(name=header, value=body, inline=False)
|
|
||||||
|
|
||||||
if len(table) > 3:
|
|
||||||
desc += f"\nRecords 1 to 3 of {len(table)}. See ae7q.com for more..."
|
|
||||||
|
|
||||||
embed.description = desc
|
|
||||||
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
"""
|
|
||||||
raise NotImplementedError("Application history lookup not yet supported. "
|
|
||||||
"Check back in a later version of the bot.")
|
|
||||||
|
|
||||||
@_ae7q_lookup.command(name="frn", aliases=["f"], category=cmn.Cats.LOOKUP)
|
|
||||||
async def _ae7q_frn(self, ctx: commands.Context, frn: str):
|
|
||||||
"""Looks up the history of an FRN on [ae7q.com](http://ae7q.com/)."""
|
|
||||||
"""
|
|
||||||
NOTES:
|
|
||||||
- 2 tables: callsign history and application history
|
|
||||||
- If not found: no tables
|
|
||||||
"""
|
|
||||||
with ctx.typing():
|
|
||||||
base_url = "http://ae7q.com/query/data/FrnHistory.php?FRN="
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
|
|
||||||
if not frn.isdecimal():
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
embed.title = "AE7Q History for FRN"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.description = "Not a valid FRN!"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
async with self.session.get(base_url + frn) as resp:
|
|
||||||
if resp.status != 200:
|
|
||||||
raise cmn.BotHTTPError(resp)
|
|
||||||
page = await resp.text()
|
|
||||||
|
|
||||||
soup = BeautifulSoup(page, features="html.parser")
|
|
||||||
tables = [[row for row in table.find_all("tr")] for table in soup.select("table.Database")]
|
|
||||||
|
|
||||||
if not len(tables):
|
|
||||||
embed.title = f"AE7Q History for FRN {frn}"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.url = base_url + frn
|
|
||||||
embed.description = f"No records found for FRN `{frn}`"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
table = tables[0]
|
|
||||||
|
|
||||||
table_headers = table[0].find_all("th")
|
|
||||||
first_header = "".join(table_headers[0].strings) if len(table_headers) > 0 else None
|
|
||||||
|
|
||||||
# catch if the wrong table was selected
|
|
||||||
if first_header is None or not first_header.startswith("With Licensee"):
|
|
||||||
embed.title = f"AE7Q History for FRN {frn}"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.url = base_url + frn
|
|
||||||
embed.description = f"No records found for FRN `{frn}`"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
table = await process_table(table[2:])
|
|
||||||
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
embed.title = f"AE7Q History for FRN {frn}"
|
|
||||||
embed.colour = cmn.colours.good
|
|
||||||
embed.url = base_url + frn
|
|
||||||
|
|
||||||
# add the first three rows of the table to the embed
|
|
||||||
for row in table[0:3]:
|
|
||||||
header = f"**{row[0]}** ({row[3]})" # **Callsign** (Applicant Type)
|
|
||||||
body = (f"Name: *{row[2]}*\n"
|
|
||||||
f"Class: *{row[4]}*\n"
|
|
||||||
f"Region: *{row[1]}*\n"
|
|
||||||
f"Status: *{row[5]}*\n"
|
|
||||||
f"Granted: *{row[6]}*\n"
|
|
||||||
f"Effective: *{row[7]}*\n"
|
|
||||||
f"Cancelled: *{row[8]}*\n"
|
|
||||||
f"Expires: *{row[9]}*")
|
|
||||||
embed.add_field(name=header, value=body, inline=False)
|
|
||||||
|
|
||||||
if len(table) > 3:
|
|
||||||
embed.description = f"Records 1 to 3 of {len(table)}. See ae7q.com for more..."
|
|
||||||
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
|
|
||||||
@_ae7q_lookup.command(name="licensee", aliases=["l"], category=cmn.Cats.LOOKUP)
|
|
||||||
async def _ae7q_licensee(self, ctx: commands.Context, licensee_id: str):
|
|
||||||
"""Looks up the history of a licensee ID on [ae7q.com](http://ae7q.com/)."""
|
|
||||||
with ctx.typing():
|
|
||||||
licensee_id = licensee_id.upper()
|
|
||||||
base_url = "http://ae7q.com/query/data/LicenseeIdHistory.php?ID="
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
|
|
||||||
if not licensee_id.isalnum():
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
embed.title = "AE7Q History for Licensee"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.description = "Not a valid licensee ID!"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
async with self.session.get(base_url + licensee_id) as resp:
|
|
||||||
if resp.status != 200:
|
|
||||||
raise cmn.BotHTTPError(resp)
|
|
||||||
page = await resp.text()
|
|
||||||
|
|
||||||
soup = BeautifulSoup(page, features="html.parser")
|
|
||||||
tables = [[row for row in table.find_all("tr")] for table in soup.select("table.Database")]
|
|
||||||
|
|
||||||
if not len(tables):
|
|
||||||
embed.title = f"AE7Q History for Licensee {licensee_id}"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.url = base_url + licensee_id
|
|
||||||
embed.description = f"No records found for Licensee `{licensee_id}`"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
table = tables[0]
|
|
||||||
|
|
||||||
table_headers = table[0].find_all("th")
|
|
||||||
first_header = "".join(table_headers[0].strings) if len(table_headers) > 0 else None
|
|
||||||
|
|
||||||
# catch if the wrong table was selected
|
|
||||||
if first_header is None or not first_header.startswith("With FCC"):
|
|
||||||
embed.title = f"AE7Q History for Licensee {licensee_id}"
|
|
||||||
embed.colour = cmn.colours.bad
|
|
||||||
embed.url = base_url + licensee_id
|
|
||||||
embed.description = f"No records found for Licensee `{licensee_id}`"
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
return
|
|
||||||
|
|
||||||
table = await process_table(table[2:])
|
|
||||||
|
|
||||||
embed = cmn.embed_factory(ctx)
|
|
||||||
embed.title = f"AE7Q History for Licensee {licensee_id}"
|
|
||||||
embed.colour = cmn.colours.good
|
|
||||||
embed.url = base_url + licensee_id
|
|
||||||
|
|
||||||
# add the first three rows of the table to the embed
|
|
||||||
for row in table[0:3]:
|
|
||||||
header = f"**{row[0]}** ({row[3]})" # **Callsign** (Applicant Type)
|
|
||||||
body = (f"Name: *{row[2]}*\n"
|
|
||||||
f"Class: *{row[4]}*\n"
|
|
||||||
f"Region: *{row[1]}*\n"
|
|
||||||
f"Status: *{row[5]}*\n"
|
|
||||||
f"Granted: *{row[6]}*\n"
|
|
||||||
f"Effective: *{row[7]}*\n"
|
|
||||||
f"Cancelled: *{row[8]}*\n"
|
|
||||||
f"Expires: *{row[9]}*")
|
|
||||||
embed.add_field(name=header, value=body, inline=False)
|
|
||||||
|
|
||||||
if len(table) > 3:
|
|
||||||
embed.description = f"Records 1 to 3 of {len(table)}. See ae7q.com for more..."
|
|
||||||
|
|
||||||
await ctx.send(embed=embed)
|
|
||||||
|
|
||||||
|
|
||||||
async def process_table(table: list):
|
|
||||||
"""Processes tables (*not* including headers) and returns the processed table"""
|
|
||||||
table_contents = []
|
|
||||||
for tr in table:
|
|
||||||
row = []
|
|
||||||
for td in tr.find_all("td"):
|
|
||||||
cell_val = td.getText().strip()
|
|
||||||
row.append(cell_val if cell_val else "-")
|
|
||||||
|
|
||||||
# take care of columns that span multiple rows by copying the contents rightward
|
|
||||||
if "colspan" in td.attrs and int(td.attrs["colspan"]) > 1:
|
|
||||||
for i in range(int(td.attrs["colspan"]) - 1):
|
|
||||||
row.append(row[-1])
|
|
||||||
|
|
||||||
# get rid of ditto marks by copying the contents from the previous row
|
|
||||||
for i, cell in enumerate(row):
|
|
||||||
if cell == "\"":
|
|
||||||
row[i] = table_contents[-1][i]
|
|
||||||
# add row to table
|
|
||||||
table_contents += [row]
|
|
||||||
return table_contents
|
|
||||||
|
|
||||||
|
|
||||||
def setup(bot: commands.Bot):
|
def setup(bot: commands.Bot):
|
||||||
bot.add_cog(AE7QCog(bot))
|
bot.add_cog(AE7QCog())
|
||||||
|
10
exts/base.py
10
exts/base.py
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Base extension for qrm
|
Base extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2019-2021 classabbyamp, 0x5c
|
Copyright (C) 2019-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
@ -141,7 +141,8 @@ class QrmHelpCommand(commands.HelpCommand):
|
|||||||
embed.title = await self.get_command_signature(group)
|
embed.title = await self.get_command_signature(group)
|
||||||
embed.description = group.help
|
embed.description = group.help
|
||||||
for cmd in await self.filter_commands(group.commands, sort=True):
|
for cmd in await self.filter_commands(group.commands, sort=True):
|
||||||
embed.add_field(name=await self.get_command_signature(cmd), value=cmd.help, inline=False)
|
embed.add_field(name=await self.get_command_signature(cmd), value=cmd.help if cmd.help else "",
|
||||||
|
inline=False)
|
||||||
await self.context.send(embed=embed)
|
await self.context.send(embed=embed)
|
||||||
|
|
||||||
|
|
||||||
@ -177,7 +178,7 @@ class BaseCog(commands.Cog):
|
|||||||
|
|
||||||
@commands.Cog.listener()
|
@commands.Cog.listener()
|
||||||
async def on_ready(self):
|
async def on_ready(self):
|
||||||
if not self.bot_invite:
|
if not self.bot_invite and self.bot.user:
|
||||||
self.bot_invite = (f"https://discordapp.com/oauth2/authorize?client_id={self.bot.user.id}"
|
self.bot_invite = (f"https://discordapp.com/oauth2/authorize?client_id={self.bot.user.id}"
|
||||||
f"&scope=bot&permissions={opt.invite_perms}")
|
f"&scope=bot&permissions={opt.invite_perms}")
|
||||||
|
|
||||||
@ -196,7 +197,8 @@ class BaseCog(commands.Cog):
|
|||||||
inline=False)
|
inline=False)
|
||||||
if opt.enable_invite_cmd and (await self.bot.application_info()).bot_public:
|
if opt.enable_invite_cmd and (await self.bot.application_info()).bot_public:
|
||||||
embed.add_field(name="Invite qrm to Your Server", value=self.bot_invite, inline=False)
|
embed.add_field(name="Invite qrm to Your Server", value=self.bot_invite, inline=False)
|
||||||
embed.set_thumbnail(url=str(self.bot.user.avatar_url))
|
if self.bot.user and self.bot.user.avatar:
|
||||||
|
embed.set_thumbnail(url=str(self.bot.user.avatar.url))
|
||||||
await ctx.send(embed=embed)
|
await ctx.send(embed=embed)
|
||||||
|
|
||||||
@commands.command(name="ping", aliases=["beep"], category=cmn.BoltCats.INFO)
|
@commands.command(name="ping", aliases=["beep"], category=cmn.BoltCats.INFO)
|
||||||
|
@ -2,18 +2,16 @@
|
|||||||
Callsign Lookup extension for qrm
|
Callsign Lookup extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2019-2020 classabbyamp, 0x5c (as qrz.py)
|
Copyright (C) 2019-2020 classabbyamp, 0x5c (as qrz.py)
|
||||||
Copyright (C) 2021 classabbyamp, 0x5c
|
Copyright (C) 2021-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
from typing import Dict
|
from typing import Dict
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
from qrztools import qrztools, QrzAsync, QrzError
|
from callsignlookuptools import QrzAsyncClient, CallsignLookupError, CallsignData
|
||||||
from gridtools import Grid, LatLong
|
|
||||||
|
|
||||||
from discord.ext import commands
|
from discord.ext import commands
|
||||||
|
|
||||||
@ -29,14 +27,16 @@ class QRZCog(commands.Cog):
|
|||||||
self.qrz = None
|
self.qrz = None
|
||||||
try:
|
try:
|
||||||
if keys.qrz_user and keys.qrz_pass:
|
if keys.qrz_user and keys.qrz_pass:
|
||||||
self.qrz = QrzAsync(keys.qrz_user, keys.qrz_pass, useragent="discord-qrm2",
|
|
||||||
session=aiohttp.ClientSession(connector=bot.qrm.connector))
|
|
||||||
# seed the qrz object with the previous session key, in case it already works
|
# seed the qrz object with the previous session key, in case it already works
|
||||||
|
session_key = ""
|
||||||
try:
|
try:
|
||||||
with open("data/qrz_session") as qrz_file:
|
with open("data/qrz_session") as qrz_file:
|
||||||
self.qrz.session_key = qrz_file.readline().strip()
|
session_key = qrz_file.readline().strip()
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
pass
|
pass
|
||||||
|
self.qrz = QrzAsyncClient(username=keys.qrz_user, password=keys.qrz_pass, useragent="discord-qrm2",
|
||||||
|
session_key=session_key,
|
||||||
|
session=aiohttp.ClientSession(connector=bot.qrm.connector))
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@ -63,69 +63,65 @@ class QRZCog(commands.Cog):
|
|||||||
|
|
||||||
async with ctx.typing():
|
async with ctx.typing():
|
||||||
try:
|
try:
|
||||||
data = await self.qrz.get_callsign(callsign)
|
data = await self.qrz.search(callsign)
|
||||||
except QrzError as e:
|
except CallsignLookupError as e:
|
||||||
embed.colour = cmn.colours.bad
|
embed.colour = cmn.colours.bad
|
||||||
embed.description = str(e)
|
embed.description = str(e)
|
||||||
await ctx.send(embed=embed)
|
await ctx.send(embed=embed)
|
||||||
return
|
return
|
||||||
|
|
||||||
embed.title = f"QRZ Data for {data.call}"
|
embed.title = f"QRZ Data for {data.callsign}"
|
||||||
embed.colour = cmn.colours.good
|
embed.colour = cmn.colours.good
|
||||||
embed.url = data.url
|
embed.url = data.url
|
||||||
if data.image != qrztools.QrzImage():
|
if data.image is not None:
|
||||||
embed.set_thumbnail(url=data.image.url)
|
embed.set_thumbnail(url=data.image.url)
|
||||||
|
|
||||||
for title, val in qrz_process_info(data).items():
|
for title, val in qrz_process_info(data).items():
|
||||||
if val:
|
if val is not None and (val := str(val)):
|
||||||
embed.add_field(name=title, value=val, inline=True)
|
embed.add_field(name=title, value=val, inline=True)
|
||||||
await ctx.send(embed=embed)
|
await ctx.send(embed=embed)
|
||||||
|
|
||||||
|
|
||||||
def qrz_process_info(data: qrztools.QrzCallsignData) -> Dict:
|
def qrz_process_info(data: CallsignData) -> Dict:
|
||||||
if data.name != qrztools.Name():
|
if data.name is not None:
|
||||||
if opt.qrz_only_nickname:
|
if opt.qrz_only_nickname:
|
||||||
if data.name.nickname:
|
nm = data.name.name if data.name.name is not None else ""
|
||||||
name = data.name.nickname + " " + data.name.name
|
if data.name.nickname is not None:
|
||||||
|
name = data.name.nickname + " " + nm
|
||||||
elif data.name.first:
|
elif data.name.first:
|
||||||
name = data.name.first + " " + data.name.name
|
name = data.name.first + " " + nm
|
||||||
else:
|
else:
|
||||||
name = data.name.name
|
name = nm
|
||||||
else:
|
else:
|
||||||
name = data.name.formatted_name
|
name = data.name
|
||||||
else:
|
else:
|
||||||
name = None
|
name = None
|
||||||
|
|
||||||
if data.address != qrztools.Address():
|
qsl = dict()
|
||||||
state = ", " + data.address.state + " " if data.address.state else ""
|
if data.qsl is not None:
|
||||||
address = "\n".join(
|
qsl = {
|
||||||
[x for x
|
"eQSL?": data.qsl.eqsl,
|
||||||
in [data.address.attn, data.address.line1, data.address.line2 + state, data.address.zip]
|
"Paper QSL?": data.qsl.mail,
|
||||||
if x]
|
"LotW?": data.qsl.lotw,
|
||||||
)
|
"QSL Info": data.qsl.info,
|
||||||
else:
|
}
|
||||||
address = None
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"Name": name,
|
"Name": name,
|
||||||
"Country": data.address.country,
|
"Country": data.address.country if data.address is not None else None,
|
||||||
"Address": address,
|
"Address": data.address,
|
||||||
"Grid Square": data.grid if data.grid != Grid(LatLong(0, 0)) else None,
|
"Grid Square": data.grid,
|
||||||
"County": data.county if data.county else None,
|
"County": data.county,
|
||||||
"CQ Zone": data.cq_zone if data.cq_zone else None,
|
"CQ Zone": data.cq_zone,
|
||||||
"ITU Zone": data.itu_zone if data.itu_zone else None,
|
"ITU Zone": data.itu_zone,
|
||||||
"IOTA Designator": data.iota if data.iota else None,
|
"IOTA Designator": data.iota,
|
||||||
"Expires": f"{data.expire_date:%Y-%m-%d}" if data.expire_date != datetime.min else None,
|
"Expires": f"{data.expire_date:%Y-%m-%d}" if data.expire_date is not None else None,
|
||||||
"Aliases": ", ".join(data.aliases) if data.aliases else None,
|
"Aliases": ", ".join(data.aliases) if data.aliases else None,
|
||||||
"Previous Callsign": data.prev_call if data.prev_call else None,
|
"Previous Callsign": data.prev_call,
|
||||||
"License Class": data.lic_class if data.lic_class else None,
|
"License Class": data.lic_class,
|
||||||
"Trustee": data.trustee if data.trustee else None,
|
"Trustee": data.trustee,
|
||||||
"eQSL?": "Yes" if data.eqsl else "No",
|
"Born": data.born,
|
||||||
"Paper QSL?": "Yes" if data.mail_qsl else "No",
|
} | qsl
|
||||||
"LotW?": "Yes" if data.lotw_qsl else "No",
|
|
||||||
"QSL Info": data.qsl_manager if data.qsl_manager else None,
|
|
||||||
"Born": f"{data.born:%Y-%m-%d}" if data.born != datetime.min else None
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def setup(bot):
|
def setup(bot):
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
Codes extension for qrm
|
Codes extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2019-2021 classabbyamp, 0x5c (as ham.py)
|
Copyright (C) 2019-2021 classabbyamp, 0x5c (as ham.py)
|
||||||
Copyright (C) 2021 classabbyamp, 0x5c
|
Copyright (C) 2021-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Contest Calendar extension for qrm
|
Contest Calendar extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2021 classabbyamp, 0x5c
|
Copyright (C) 2021-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Conversion extension for qrm
|
Conversion extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2020-2021 classabbyamp, 0x5c
|
Copyright (C) 2020-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
@ -187,7 +187,7 @@ def _calc_volt(db: float, ref: float):
|
|||||||
|
|
||||||
# testing code
|
# testing code
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
while(True):
|
while True:
|
||||||
try:
|
try:
|
||||||
ip = input("> ").split()
|
ip = input("> ").split()
|
||||||
initial = float(ip[0])
|
initial = float(ip[0])
|
||||||
|
@ -2,7 +2,7 @@
|
|||||||
DXCC Prefix Lookup extension for qrm
|
DXCC Prefix Lookup extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2019-2020 classabbyamp, 0x5c (as lookup.py)
|
Copyright (C) 2019-2020 classabbyamp, 0x5c (as lookup.py)
|
||||||
Copyright (C) 2021 classabbyamp, 0x5c
|
Copyright (C) 2021-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Fun extension for qrm
|
Fun extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2019-2021 classabbyamp, 0x5c
|
Copyright (C) 2019-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Grid extension for qrm
|
Grid extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2019-2021 classabbyamp, 0x5c
|
Copyright (C) 2019-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
"""
|
"""
|
||||||
Image extension for qrm
|
Image extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2019-2021 classabbyamp, 0x5c
|
Copyright (C) 2019-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
|
@ -2,18 +2,16 @@
|
|||||||
Land Weather extension for qrm
|
Land Weather extension for qrm
|
||||||
---
|
---
|
||||||
Copyright (C) 2019-2020 classabbyamp, 0x5c (as weather.py)
|
Copyright (C) 2019-2020 classabbyamp, 0x5c (as weather.py)
|
||||||
Copyright (C) 2021 classabbyamp, 0x5c
|
Copyright (C) 2021-2023 classabbyamp, 0x5c
|
||||||
|
|
||||||
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
SPDX-License-Identifier: LiLiQ-Rplus-1.1
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
import re
|
import re
|
||||||
from typing import List
|
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
|
|
||||||
from discord import Embed
|
|
||||||
import discord.ext.commands as commands
|
import discord.ext.commands as commands
|
||||||
|
|
||||||
import common as cmn
|
import common as cmn
|
||||||
@ -102,7 +100,32 @@ class WeatherCog(commands.Cog):
|
|||||||
|
|
||||||
Airports should be given as an \
|
Airports should be given as an \
|
||||||
[ICAO code](https://en.wikipedia.org/wiki/List_of_airports_by_IATA_and_ICAO_code)."""
|
[ICAO code](https://en.wikipedia.org/wiki/List_of_airports_by_IATA_and_ICAO_code)."""
|
||||||
await ctx.send(embed=await self.gen_metar_taf_embed(ctx, airport, hours, False))
|
|
||||||
|
embed = cmn.embed_factory(ctx)
|
||||||
|
airport = airport.upper()
|
||||||
|
|
||||||
|
if not re.fullmatch(r"\w(\w|\d){2,3}", airport):
|
||||||
|
embed.title = "Invalid airport given!"
|
||||||
|
embed.colour = cmn.colours.bad
|
||||||
|
await ctx.send(embed=embed)
|
||||||
|
return
|
||||||
|
|
||||||
|
url = f"https://aviationweather.gov/api/data/metar?ids={airport}&format=raw&taf=false&hours={hours}"
|
||||||
|
async with self.session.get(url) as r:
|
||||||
|
if r.status != 200:
|
||||||
|
raise cmn.BotHTTPError(r)
|
||||||
|
metar = await r.text()
|
||||||
|
|
||||||
|
if hours > 0:
|
||||||
|
embed.title = f"METAR for {airport} for the last {hours} hour{'s' if hours > 1 else ''}"
|
||||||
|
else:
|
||||||
|
embed.title = f"Current METAR for {airport}"
|
||||||
|
|
||||||
|
embed.description = "Data from [aviationweather.gov](https://www.aviationweather.gov/)."
|
||||||
|
embed.colour = cmn.colours.good
|
||||||
|
embed.description += f"\n\n```\n{metar}\n```"
|
||||||
|
|
||||||
|
await ctx.send(embed=embed)
|
||||||
|
|
||||||
     @commands.command(name="taf", category=cmn.Cats.WEATHER)
     async def taf(self, ctx: commands.Context, airport: str):
@@ -110,57 +133,28 @@ class WeatherCog(commands.Cog):
         Airports should be given as an \
         [ICAO code](https://en.wikipedia.org/wiki/List_of_airports_by_IATA_and_ICAO_code)."""
-        await ctx.send(embed=await self.gen_metar_taf_embed(ctx, airport, 0, True))
-
-    async def gen_metar_taf_embed(self, ctx: commands.Context, airport: str, hours: int, taf: bool) -> Embed:
         embed = cmn.embed_factory(ctx)
         airport = airport.upper()

-        if re.fullmatch(r"\w(\w|\d){2,3}", airport):
+        if not re.fullmatch(r"\w(\w|\d){2,3}", airport):
-            metar = await self.get_metar_taf_data(airport, hours, taf)
-
-            if taf:
-                embed.title = f"Current TAF for {airport}"
-            elif hours > 0:
-                embed.title = f"METAR for {airport} for the last {hours} hour{'s' if hours > 1 else ''}"
-            else:
-                embed.title = f"Current METAR for {airport}"
-
-            embed.description = "Data from [aviationweather.gov](https://www.aviationweather.gov/metar/data)."
-            embed.colour = cmn.colours.good
-
-            data = "\n".join(metar)
-            embed.description += f"\n\n```\n{data}\n```"
-        else:
             embed.title = "Invalid airport given!"
             embed.colour = cmn.colours.bad
-        return embed
+            await ctx.send(embed=embed)
+            return

-    async def get_metar_taf_data(self, airport: str, hours: int, taf: bool) -> List[str]:
+        url = f"https://aviationweather.gov/api/data/taf?ids={airport}&format=raw&metar=true"
-        url = (f"https://www.aviationweather.gov/metar/data?ids={airport}&format=raw&hours={hours}"
-               f"&taf={'on' if taf else 'off'}&layout=off")

         async with self.session.get(url) as r:
             if r.status != 200:
                 raise cmn.BotHTTPError(r)
-            page = await r.text()
+            taf = await r.text()

-        # pare down to just the data
+        embed.title = f"Current TAF for {airport}"
-        page = page.split("<!-- Data starts here -->")[1].split("<!-- Data ends here -->")[0].strip()
+        embed.description = "Data from [aviationweather.gov](https://www.aviationweather.gov/)."
-        # split at <hr>s
+        embed.colour = cmn.colours.good
-        data = re.split(r"<hr.*>", page, maxsplit=len(airport))
+        embed.description += f"\n\n```\n{taf}\n```"

-        parsed = []
+        await ctx.send(embed=embed)
-        for sec in data:
-            if sec.strip():
-                for line in sec.split("\n"):
-                    line = line.strip()
-                    # remove HTML stuff
-                    line = line.replace("<code>", "").replace("</code>", "")
-                    line = line.replace("<strong>", "").replace("</strong>", "")
-                    line = line.replace("<br/>", "\n").replace("&nbsp;", " ")
-                    line = line.strip("\n")
-                    parsed.append(line)
-        return parsed


 def setup(bot: commands.Bot):
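The rewritten metar and taf commands query aviationweather.gov's Data API, which returns plain text, so the old HTML-scraping helpers (gen_metar_taf_embed, get_metar_taf_data) are no longer needed. A minimal standalone sketch of the same fetch pattern, assuming only aiohttp and a made-up station ID (the cog itself reuses its existing self.session instead of opening a new session):

import asyncio

import aiohttp


async def fetch_metar(airport: str, hours: int = 0) -> str:
    # Same endpoint the metar command uses; format=raw returns plain-text METARs.
    url = (f"https://aviationweather.gov/api/data/metar"
           f"?ids={airport}&format=raw&taf=false&hours={hours}")
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as r:
            if r.status != 200:
                raise RuntimeError(f"aviationweather.gov returned HTTP {r.status}")
            return await r.text()


# Example (made-up station ID):
# print(asyncio.run(fetch_metar("KLGA", hours=2)))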
@@ -1,7 +1,7 @@
 """
 Morse Code extension for qrm
 ---
-Copyright (C) 2019-2021 classabbyamp, 0x5c
+Copyright (C) 2019-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -1,7 +1,7 @@
 """
 Prefixes Lookup extension for qrm
 ---
-Copyright (C) 2021 classabbyamp, 0x5c
+Copyright (C) 2021-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -1,17 +1,17 @@
 """
 Propagation extension for qrm
 ---
-Copyright (C) 2019-2021 classabbyamp, 0x5c
+Copyright (C) 2019-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """


+from datetime import datetime
 from io import BytesIO

-import aiohttp
 import cairosvg
-from datetime import datetime
+import httpx

 import discord
 import discord.ext.commands as commands
@@ -23,19 +23,22 @@ class PropagationCog(commands.Cog):
     muf_url = "https://prop.kc2g.com/renders/current/mufd-normal-now.svg"
     fof2_url = "https://prop.kc2g.com/renders/current/fof2-normal-now.svg"
     gl_baseurl = "https://www.fourmilab.ch/cgi-bin/uncgi/Earth?img=ETOPO1_day-m.evif&dynimg=y&opt=-p"
-    n0nbh_sun_url = "http://www.hamqsl.com/solarsun.php"
+    n0nbh_sun_url = "https://www.hamqsl.com/solarsun.php"
+    noaa_drap_url = "https://services.swpc.noaa.gov/images/animations/d-rap/global/d-rap/latest.png"

     def __init__(self, bot):
         self.bot = bot
-        self.session = aiohttp.ClientSession(connector=bot.qrm.connector)
+        self.httpx_client: httpx.AsyncClient = bot.qrm.httpx_client

     @commands.command(name="mufmap", aliases=["muf"], category=cmn.Cats.WEATHER)
     async def mufmap(self, ctx: commands.Context):
         """Shows a world map of the Maximum Usable Frequency (MUF)."""
         async with ctx.typing():
-            async with self.session.get(self.muf_url) as r:
-                svg = await r.read()
-            out = BytesIO(cairosvg.svg2png(bytestring=svg))
+            resp = await self.httpx_client.get(self.muf_url)
+            await resp.aclose()
+            if resp.status_code != 200:
+                raise cmn.BotHTTPError(resp)
+            out = BytesIO(cairosvg.svg2png(bytestring=await resp.aread()))
             file = discord.File(out, "muf_map.png")
             embed = cmn.embed_factory(ctx)
             embed.title = "Maximum Usable Frequency Map"
@@ -47,9 +50,11 @@ class PropagationCog(commands.Cog):
     async def fof2map(self, ctx: commands.Context):
         """Shows a world map of the Critical Frequency (foF2)."""
         async with ctx.typing():
-            async with self.session.get(self.fof2_url) as r:
-                svg = await r.read()
-            out = BytesIO(cairosvg.svg2png(bytestring=svg))
+            resp = await self.httpx_client.get(self.fof2_url)
+            await resp.aclose()
+            if resp.status_code != 200:
+                raise cmn.BotHTTPError(resp)
+            out = BytesIO(cairosvg.svg2png(bytestring=await resp.aread()))
             file = discord.File(out, "fof2_map.png")
             embed = cmn.embed_factory(ctx)
             embed.title = "Critical Frequency (foF2) Map"
@@ -67,18 +72,30 @@ class PropagationCog(commands.Cog):
         embed.set_image(url=self.gl_baseurl + date_params)
         await ctx.send(embed=embed)

-    @commands.command(name="solarweather", aliases=["solar", "bandconditions", "cond", "condx", "conditions"],
-                      category=cmn.Cats.WEATHER)
+    @commands.command(name="solarweather", aliases=["solar"], category=cmn.Cats.WEATHER)
     async def solarweather(self, ctx: commands.Context):
         """Gets a solar weather report."""
+        resp = await self.httpx_client.get(self.n0nbh_sun_url)
+        await resp.aclose()
+        if resp.status_code != 200:
+            raise cmn.BotHTTPError(resp)
+        img = BytesIO(await resp.aread())
+        file = discord.File(img, "solarweather.png")
         embed = cmn.embed_factory(ctx)
         embed.title = "☀️ Current Solar Weather"
-        if ctx.invoked_with in ["bandconditions", "cond", "condx", "conditions"]:
-            embed.add_field(name="⚠️ Deprecated Command Alias",
-                            value=(f"This command has been renamed to `{ctx.prefix}solar`!\n"
-                                   "The alias you used will be removed in the next version."))
         embed.colour = cmn.colours.good
-        embed.set_image(url=self.n0nbh_sun_url)
+        embed.set_image(url="attachment://solarweather.png")
+        await ctx.send(file=file, embed=embed)
+
+    @commands.command(name="drapmap", aliases=["drap"], category=cmn.Cats.WEATHER)
+    async def drapmap(self, ctx: commands.Context):
+        """Gets the current D-RAP map for radio blackouts"""
+        embed = cmn.embed_factory(ctx)
+        embed.title = "D Region Absorption Predictions (D-RAP) Map"
+        embed.colour = cmn.colours.good
+        embed.description = \
+            "Image from [swpc.noaa.gov](https://www.swpc.noaa.gov/products/d-region-absorption-predictions-d-rap)"
+        embed.set_image(url=self.noaa_drap_url)
         await ctx.send(embed=embed)
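The propagation cog now draws from the bot-wide httpx.AsyncClient and checks status_code before rendering, while the MUF and foF2 maps are still rasterised with cairosvg before being attached. A rough sketch of that fetch-and-render step in isolation, assuming a throwaway client rather than the shared bot.qrm.httpx_client:

from io import BytesIO

import cairosvg
import httpx


async def render_map(url: str) -> BytesIO:
    async with httpx.AsyncClient() as client:
        resp = await client.get(url)
        resp.raise_for_status()  # stand-in for the cog's cmn.BotHTTPError check
        svg = resp.content
    # Rasterise the SVG so it can be sent as a Discord file attachment.
    return BytesIO(cairosvg.svg2png(bytestring=svg))


# Example with the MUF map URL used above:
# png = await render_map("https://prop.kc2g.com/renders/current/mufd-normal-now.svg")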
@@ -1,7 +1,7 @@
 """
 Study extension for qrm
 ---
-Copyright (C) 2019-2021 classabbyamp, 0x5c
+Copyright (C) 2019-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -159,13 +159,13 @@ class StudyCog(commands.Cog):
             await cmn.add_react(q_msg, list(self.choices.values())[i])
         await cmn.add_react(q_msg, cmn.emojis.question)

-        def check(reaction, user):
-            return (user.id != self.bot.user.id
-                    and reaction.message.id == q_msg.id
-                    and (str(reaction.emoji) in self.choices.values() or str(reaction.emoji) == cmn.emojis.question))
+        def check(ev):
+            return (ev.user_id != self.bot.user.id
+                    and ev.message_id == q_msg.id
+                    and (str(ev.emoji) in self.choices.values() or str(ev.emoji) == cmn.emojis.question))

         try:
-            reaction, user = await self.bot.wait_for("reaction_add", timeout=300.0, check=check)
+            ev = await self.bot.wait_for("raw_reaction_add", timeout=300.0, check=check)
         except asyncio.TimeoutError:
             embed.set_field_at(1, name="Answers", value=answers_str_bolded, inline=False)
             embed.set_field_at(2, name="Answer",
@@ -174,16 +174,18 @@ class StudyCog(commands.Cog):
             embed.colour = cmn.colours.timeout
             await q_msg.edit(embed=embed)
         else:
-            if str(reaction.emoji) == cmn.emojis.question:
+            if str(ev.emoji) == cmn.emojis.question:
                 embed.set_field_at(1, name="Answers", value=answers_str_bolded, inline=False)
                 embed.set_field_at(2, name="Answer",
                                    value=f"The correct answer was {self.choices[question['answer']]}", inline=False)
-                embed.add_field(name="Answer Requested By", value=str(user), inline=False)
+                # only available in guilds, but it only makes sense there
+                if ev.member:
+                    embed.add_field(name="Answer Requested By", value=str(ev.member), inline=False)
                 embed.colour = cmn.colours.timeout
                 await q_msg.edit(embed=embed)
             else:
                 answers_str_checked = ""
-                chosen_ans = self.choices_inv[str(reaction.emoji)]
+                chosen_ans = self.choices_inv[str(ev.emoji)]
                 for letter, ans in answers.items():
                     answers_str_checked += f"{self.choices[letter]}"
                     if letter == question["answer"] == chosen_ans:
@@ -195,19 +197,23 @@ class StudyCog(commands.Cog):
                     else:
                         answers_str_checked += f" {ans}\n"

-                if self.choices[question["answer"]] == str(reaction.emoji):
+                if self.choices[question["answer"]] == str(ev.emoji):
                     embed.set_field_at(1, name="Answers", value=answers_str_checked, inline=False)
                     embed.set_field_at(2, name="Answer", value=(f"{cmn.emojis.check_mark} "
-                                                                f"**Correct!** The answer was {reaction.emoji}"))
-                    embed.add_field(name="Answered By", value=str(user), inline=False)
+                                                                f"**Correct!** The answer was {ev.emoji}"))
+                    # only available in guilds, but it only makes sense there
+                    if ev.member:
+                        embed.add_field(name="Answered By", value=str(ev.member), inline=False)
                     embed.colour = cmn.colours.good
                     await q_msg.edit(embed=embed)
                 else:
                     embed.set_field_at(1, name="Answers", value=answers_str_checked, inline=False)
                     embed.set_field_at(2, name="Answer",
                                        value=(f"{cmn.emojis.x} **Incorrect!** The correct answer was "
-                                              f"{self.choices[question['answer']]}, not {reaction.emoji}"))
-                    embed.add_field(name="Answered By", value=str(user), inline=False)
+                                              f"{self.choices[question['answer']]}, not {ev.emoji}"))
+                    # only available in guilds, but it only makes sense there
+                    if ev.member:
+                        embed.add_field(name="Answered By", value=str(ev.member), inline=False)
                     embed.colour = cmn.colours.bad
                     await q_msg.edit(embed=embed)
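The quiz now waits on raw_reaction_add instead of reaction_add; raw events fire even when the question message has fallen out of the message cache, but they deliver IDs and a partial emoji rather than full objects, and ev.member is only set for reactions added in a guild. A simplified sketch of that wait pattern, where bot, q_msg, and a hypothetical valid_emojis set stand in for the cog's attributes:

import asyncio

import discord


async def wait_for_answer(bot, q_msg: discord.Message, valid_emojis: set[str]):
    def check(ev: discord.RawReactionActionEvent) -> bool:
        # Raw events carry IDs, not cached Message/User objects.
        return (ev.user_id != bot.user.id
                and ev.message_id == q_msg.id
                and str(ev.emoji) in valid_emojis)

    try:
        ev = await bot.wait_for("raw_reaction_add", timeout=300.0, check=check)
    except asyncio.TimeoutError:
        return None
    # ev.member is populated only when the reaction was added in a guild.
    return str(ev.emoji), ev.member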
@@ -1,7 +1,7 @@
 """
 TeX extension for qrm
 ---
-Copyright (C) 2021 classabbyamp, 0x5c
+Copyright (C) 2021-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -1,7 +1,7 @@
 """
 Time extension for qrm
 ---
-Copyright (C) 2021 classabbyamp, 0x5c
+Copyright (C) 2021-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
6
info.py
@@ -1,18 +1,18 @@
 """
 Static info about the bot.
 ---
-Copyright (C) 2019-2021 classabbyamp, 0x5c
+Copyright (C) 2019-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """


-authors = ("@ClassAbbyAmplifier#2229", "@0x5c#0639")
+authors = ("@classabbyamp", "@0x5c.io")
 description = """A bot with various useful ham radio-related functions, written in Python."""
 license = "Québec Free and Open-Source Licence – Strong Reciprocity (LiLiQ-R+), version 1.1"
 contributing = """Check out the [source on GitHub](https://github.com/miaowware/qrm2). Contributions are welcome!

 All issues and requests related to resources (including maps, band charts, data) should be added \
 in [miaowware/qrm-resources](https://github.com/miaowware/qrm-resources)."""
-release = "2.7.4"
+release = "2.9.2"
 bot_server = "https://discord.gg/Ntbg3J4"
25
main.py
@@ -2,7 +2,7 @@
 """
 qrm, a bot for Discord
 ---
-Copyright (C) 2019-2021 classabbyamp, 0x5c
+Copyright (C) 2019-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -16,6 +16,7 @@ from datetime import datetime, time
 from types import SimpleNamespace
 from pathlib import Path

+import httpx
 import pytz

 import discord
@@ -49,9 +50,9 @@ connector = loop.run_until_complete(conn.new_connector())
 # Defining the intents
 intents = discord.Intents.none()
 intents.guilds = True
-intents.guild_messages = True
-intents.dm_messages = True
+intents.messages = True
 intents.reactions = True
+intents.message_content = True

 member_cache = discord.MemberCacheFlags.from_intents(intents)
@@ -69,6 +70,8 @@ bot.qrm = SimpleNamespace()
 # Let's store stuff here.
 bot.qrm.connector = connector
 bot.qrm.debug_mode = debug_mode
+# TODO: Add code to close the client
+bot.qrm.httpx_client = httpx.AsyncClient()


 # --- Commands ---
@@ -81,7 +84,7 @@ async def _restart_bot(ctx: commands.Context):
     await cmn.add_react(ctx.message, cmn.emojis.check_mark)
     print(f"[**] Restarting! Requested by {ctx.author}.")
     exit_code = 42 # Signals to the wrapper script that the bot needs to be restarted.
-    await bot.logout()
+    await bot.close()


 @bot.command(name="shutdown", aliases=["shut"], category=cmn.BoltCats.ADMIN)
@@ -92,7 +95,7 @@ async def _shutdown_bot(ctx: commands.Context):
     await cmn.add_react(ctx.message, cmn.emojis.check_mark)
     print(f"[**] Shutting down! Requested by {ctx.author}.")
     exit_code = 0 # Signals to the wrapper script that the bot should not be restarted.
-    await bot.logout()
+    await bot.close()


 @bot.group(name="extctl", aliases=["ex"], case_insensitive=True, category=cmn.BoltCats.ADMIN)
@@ -123,10 +126,10 @@ async def _extctl_load(ctx: commands.Context, extension: str):
     """Loads an extension."""
     try:
         bot.load_extension(ext_dir + "." + extension)
-    except commands.ExtensionNotFound as e:
+    except discord.errors.ExtensionNotFound as e:
         try:
             bot.load_extension(plugin_dir + "." + extension)
-        except commands.ExtensionNotFound:
+        except discord.errors.ExtensionNotFound:
             raise e
     await cmn.add_react(ctx.message, cmn.emojis.check_mark)

@@ -140,10 +143,10 @@ async def _extctl_reload(ctx: commands.Context, extension: str):
     await cmn.add_react(ctx.message, pika)
     try:
         bot.reload_extension(ext_dir + "." + extension)
-    except commands.ExtensionNotLoaded as e:
+    except discord.errors.ExtensionNotLoaded as e:
         try:
             bot.reload_extension(plugin_dir + "." + extension)
-        except commands.ExtensionNotLoaded:
+        except discord.errors.ExtensionNotLoaded:
             raise e
     await cmn.add_react(ctx.message, cmn.emojis.check_mark)

@@ -153,10 +156,10 @@ async def _extctl_unload(ctx: commands.Context, extension: str):
     """Unloads an extension."""
     try:
         bot.unload_extension(ext_dir + "." + extension)
-    except commands.ExtensionNotLoaded as e:
+    except discord.errors.ExtensionNotLoaded as e:
         try:
             bot.unload_extension(plugin_dir + "." + extension)
-        except commands.ExtensionNotLoaded:
+        except discord.errors.ExtensionNotLoaded:
             raise e
     await cmn.add_react(ctx.message, cmn.emojis.check_mark)
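With the switch to a Discord API v10 library, reading command text requires the privileged message_content intent, and main.py now parks a single shared httpx.AsyncClient on bot.qrm for the cogs. A condensed sketch of that setup, with a placeholder command prefix and without the config and connector plumbing from main.py:

from types import SimpleNamespace

import discord
import discord.ext.commands as commands
import httpx

intents = discord.Intents.none()
intents.guilds = True
intents.messages = True
intents.reactions = True
intents.message_content = True  # needed so prefix commands can read message text

bot = commands.Bot(command_prefix="?", intents=intents)  # "?" is a placeholder prefix
bot.qrm = SimpleNamespace()                  # scratch namespace, as in main.py
bot.qrm.httpx_client = httpx.AsyncClient()   # one client shared by all cogs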
@@ -1,9 +1,9 @@
-discord.py~=1.7.3
+py-cord-dev[speed]==2.5.0rc5
 ctyparser~=2.0
 gridtools~=1.0
-qrztools[async]~=1.0
+callsignlookuptools[async]~=1.1
 beautifulsoup4
 pytz
 cairosvg
-requests
+httpx
-pydantic
+pydantic~=2.5
@@ -1,7 +1,7 @@
 """
 Resource schemas generator for qrm2.
 ---
-Copyright (C) 2021 classabbyamp, 0x5c
+Copyright (C) 2021-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -1,7 +1,7 @@
 """
 Information about callsigns for the prefixes command in hamcog.
 ---
-Copyright (C) 2019-2021 classabbyamp, 0x5c
+Copyright (C) 2019-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -1,7 +1,7 @@
 """
 Information about callsigns for the CA prefixes command in hamcog.
 ---
-Copyright (C) 2019-2021 classabbyamp, 0x5c
+Copyright (C) 2019-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -1,7 +1,7 @@
 """
 Information about callsigns for the US prefixes command in hamcog.
 ---
-Copyright (C) 2019-2021 classabbyamp, 0x5c
+Copyright (C) 2019-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -1,7 +1,7 @@
 """
 A listing of hamstudy command resources
 ---
-Copyright (C) 2019-2021 classabbyamp, 0x5c
+Copyright (C) 2019-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
12
run.sh
@@ -17,7 +17,7 @@ fi
 # Argument handling
 _PASS_ERRORS=0
 _NO_BOTENV=0
-while [ ! -z "$1" ]; do
+while [ -n "$1" ]; do
     case $1 in
         --pass-errors)
             _PASS_ERRORS=1
@@ -34,9 +34,9 @@ while [ ! -z "$1" ]; do
 done


-# If $PYTHON_BIN is not defined, default to 'python3.9'
+# If $PYTHON_BIN is not defined, default to 'python3.11'
-if [ $_NO_BOTENV -eq 1 -a -z "$PYTHON_BIN" ]; then
+if [ $_NO_BOTENV -eq 1 ] && [ -z "$PYTHON_BIN" ]; then
-    PYTHON_BIN='python3.9'
+    PYTHON_BIN='python3.11'
 fi


@@ -69,9 +69,9 @@ echo "$0: Starting bot..."
 # The loop
 while true; do
     if [ $_NO_BOTENV -eq 1 ]; then
-        "$PYTHON_BIN" main.py $@
+        "$PYTHON_BIN" main.py "$@"
     else
-        ./$BOTENV/bin/python3 main.py $@
+        "./$BOTENV/bin/python3" main.py "$@"
     fi
     err=$?
     _message="$0: The bot exited with [$err]"
@@ -1,7 +1,7 @@
 """
 Wrapper to handle aiohttp connector creation.
 ---
-Copyright (C) 2020-2021 classabbyamp, 0x5c
+Copyright (C) 2020-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -1,7 +1,7 @@
 """
 Resources manager for qrm2.
 ---
-Copyright (C) 2021 classabbyamp, 0x5c
+Copyright (C) 2021-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -9,7 +9,7 @@ SPDX-License-Identifier: LiLiQ-Rplus-1.1

 from pathlib import Path

-import requests
+import httpx

 from utils.resources_models import Index

@@ -23,13 +23,16 @@ class ResourcesManager:

     def parse_index(self, index: str):
         """Parses the index."""
-        return Index.parse_raw(index)
+        return Index.model_validate_json(index)

     def sync_fetch(self, filepath: str):
         """Fetches files in sync mode."""
         self.print_msg(f"Fetching {filepath}", "sync")
-        with requests.get(self.url + filepath) as resp:
-            return resp.content
+        resp = httpx.get(self.url + filepath)
+        resp.raise_for_status()
+        r = resp.content
+        resp.close()
+        return r

     def sync_start(self, basedir: Path) -> Index:
         """Takes cares of constructing the local resources repository and initialising the RM."""
@@ -40,7 +43,7 @@ class ResourcesManager:
             new_index: Index = self.parse_index(raw)
             with (basedir / "index.json").open("wb") as file:
                 file.write(raw)
-        except (requests.RequestException, OSError) as ex:
+        except (httpx.RequestError, OSError) as ex:
             self.print_msg(f"There was an issue fetching the index: {ex.__class__.__name__}: {ex}", "sync")
             if (basedir / "index.json").exists():
                 self.print_msg("Old file exist, using old resources", "fallback")
@@ -58,7 +61,7 @@ class ResourcesManager:
         try:
             with (basedir / file.filename).open("wb") as f:
                 f.write(self.sync_fetch(file.filename))
-        except (requests.RequestException, OSError) as ex:
+        except (httpx.RequestError, OSError) as ex:
             ex_cls = ex.__class__.__name__
             self.print_msg(f"There was an issue fetching {file.filename}: {ex_cls}: {ex}", "sync")
             if not (basedir / file.filename).exists():
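One behavioural difference in the sync_fetch rewrite above: httpx does not raise on 4xx/5xx responses by itself, so raise_for_status() is called explicitly, and the exception it raises (httpx.HTTPStatusError) is a separate class from httpx.RequestError. A small sketch of the same pattern with both caught, assuming a placeholder base URL:

import httpx

BASE_URL = "https://example.com/resources/"  # placeholder; the real URL comes from the bot's config


def fetch(filename: str) -> bytes:
    resp = httpx.get(BASE_URL + filename)
    resp.raise_for_status()  # httpx does not raise on HTTP error codes on its own
    data = resp.content
    resp.close()
    return data


try:
    raw_index = fetch("index.json")
except (httpx.RequestError, httpx.HTTPStatusError, OSError) as ex:
    print(f"There was an issue fetching the index: {ex.__class__.__name__}: {ex}")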
@@ -1,7 +1,7 @@
 """
 Resource index models for qrm2.
 ---
-Copyright (C) 2021 classabbyamp, 0x5c
+Copyright (C) 2021-2023 classabbyamp, 0x5c

 SPDX-License-Identifier: LiLiQ-Rplus-1.1
 """
@@ -10,7 +10,7 @@ SPDX-License-Identifier: LiLiQ-Rplus-1.1
 from collections.abc import Mapping
 from datetime import datetime

-from pydantic import BaseModel
+from pydantic import BaseModel, RootModel


 class File(BaseModel):
@@ -22,18 +22,17 @@ class File(BaseModel):
         return repr(self)


-class Resource(BaseModel, Mapping):
-    # 'A Beautiful Hack' https://github.com/samuelcolvin/pydantic/issues/1802
-    __root__: dict[str, list[File]]
+class Resource(RootModel, Mapping):
+    root: dict[str, list[File]]

     def __getitem__(self, key: str) -> list[File]:
-        return self.__root__[key]
+        return self.root[key]

     def __iter__(self):
-        return iter(self.__root__)
+        return iter(self.root)

     def __len__(self) -> int:
-        return len(self.__root__)
+        return len(self.root)

     # For some reason those were not the same???
     def __str__(self) -> str:
@@ -41,7 +40,7 @@ class Resource(BaseModel, Mapping):

     # Make the repr more logical (despite the technical inaccuracy)
     def __repr_args__(self):
-        return self.__root__.items()
+        return self.root.items()


 class Index(BaseModel, Mapping):
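The Resource model above follows the pydantic v2 migration: the implicit __root__ field becomes an explicit RootModel subclass with a root attribute, and parse_raw becomes model_validate_json. A cut-down sketch of how such a migrated model is built and used, with made-up fields and payload:

from pydantic import BaseModel, RootModel


class File(BaseModel):
    filename: str  # illustrative; the real model carries more metadata


class Resource(RootModel):
    root: dict[str, list[File]]

    def __getitem__(self, key: str) -> list[File]:
        return self.root[key]


payload = '{"band_charts": [{"filename": "us_bands.png"}]}'  # made-up JSON
res = Resource.model_validate_json(payload)  # pydantic v2 replacement for parse_raw
print(res["band_charts"][0].filename)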