mirror of https://github.com/miaowware/qrm2.git (synced 2025-09-09 07:57:47 -04:00)
👀 remote lookup functions
This commit is contained in:
parent 454ca51720
commit 1b734a5151

102 cogs/lookupcog.py Normal file
@@ -0,0 +1,102 @@
"""
Lookup cog for qrm
---
Copyright (C) 2019 Abigail Gold, 0x5c

This file is part of discord-qrmbot and is released under the terms of the GNU
General Public License, version 2.
"""

import discord
from discord.ext import commands, tasks

import json
from datetime import datetime
from util import cty_json


class LookupCog(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.gs = bot.get_cog("GlobalSettings")
        self.CTY = None
        self.CTY_list = None
        self.up_cty_first_run = True

    @commands.command(name="ae7q", aliases=['ae'])
    async def _ae7q_lookup(self, ctx, call: str):
        '''Links to info about a callsign from AE7Q.'''
        await ctx.send(f'http://ae7q.com/query/data/CallHistory.php?CALL={call}')

    @commands.command(name="qrz")
    async def _qrz_lookup(self, ctx, call: str):
        '''Links to info about a callsign from QRZ.'''
        await ctx.send(f'http://qrz.com/db/{call}')

    @commands.command(name="sat")
    async def _sat_lookup(self, ctx, sat: str, grid1: str, grid2: str = None):
        '''Links to info about satellite passes on satmatch.com.
        Usage: `?sat sat_name grid1 grid2`'''
        # %%20 in the format string yields a literal '%20' (URL-encoded space).
        now = datetime.utcnow().strftime('%Y-%m-%d%%20%H:%M')
        if grid2 is None or grid2 == '':
            await ctx.send(f'http://www.satmatch.com/satellite/{sat}/obs1/{grid1}?search_start_time={now}&duration_hrs=24')
        else:
            await ctx.send(f'http://www.satmatch.com/satellite/{sat}/obs1/{grid1}/obs2/{grid2}?search_start_time={now}&duration_hrs=24')

    @commands.command(name="dxcc", aliases=['dx'])
    async def _dxcc_lookup(self, ctx, q: str):
        '''Gets info about a DXCC prefix.'''
        with ctx.typing():
            noMatch = True
            qMatch = None
            q = q.upper()
            q0 = q
            if q != 'LAST_UPDATED':
                embed = discord.Embed(title=f'DXCC Info for {q0}')
                embed.description = f'Prefix {q0} not found'
                embed.colour = self.gs.colours.bad
                # Trim the query one character at a time until it matches a
                # known prefix or runs out of characters.
                while noMatch:
                    if q in self.CTY_list:
                        qMatch = q
                        noMatch = False
                    else:
                        q = q[:-1]
                        if len(q) == 0:
                            noMatch = False
                if qMatch is not None:
                    d = self.CTY[qMatch]
                    prefix = qMatch
                    embed = embed.add_field(name="Entity", value=d['entity'])
                    embed = embed.add_field(name="CQ Zone", value=d['cq'])
                    embed = embed.add_field(name="ITU Zone", value=d['itu'])
                    embed = embed.add_field(name="Continent", value=d['continent'])
                    tz = d['tz']
                    if tz > 0:
                        tz = '+' + str(tz)
                    embed = embed.add_field(name="Time Zone", value=tz)
                    embed.description = ''
                    embed.colour = self.gs.colours.good
            else:
                updatedDate = self.CTY['last_updated'][0:4] + '-'
                updatedDate += self.CTY['last_updated'][4:6] + '-'
                updatedDate += self.CTY['last_updated'][6:8]
                res = f'CTY.DAT last updated on {updatedDate}'
                embed = discord.Embed(title=res, colour=self.gs.colours.neutral)
            await ctx.send(embed=embed)

    @tasks.loop(hours=24)
    async def _update_cty(self):
        print('Checking for CTY update...')
        regen = cty_json.genCtyJson()
        if regen or self.up_cty_first_run:
            with open('resources/cty.json') as ctyfile:
                print('Reloading CTY JSON data...')
                self.CTY = json.load(ctyfile)
                self.CTY_list = list(self.CTY.keys())
                # Sort alphabetically, then by length so the longest prefixes come first.
                self.CTY_list.sort()
                self.CTY_list.sort(key=len, reverse=True)
            self.up_cty_first_run = False


def setup(bot):
    bot.add_cog(LookupCog(bot))
    bot.get_cog("LookupCog")._update_cty.start()
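The `dxcc` command above resolves a query by longest-prefix match against the CTY prefix list: it trims the query one character at a time until it hits a known prefix. A minimal standalone sketch of that loop, not part of the commit (the toy prefix table and test calls are placeholders):

# Illustrative sketch of the prefix matching done in _dxcc_lookup.
cty_list = ["K", "KL", "VE", "VP8"]        # toy stand-in for self.CTY_list

def match_prefix(query: str):
    q = query.upper()
    while q:                               # trim one character per pass
        if q in cty_list:
            return q
        q = q[:-1]
    return None                            # query exhausted, no match

assert match_prefix("vp8abc") == "VP8"     # resolved via the 3-character prefix
assert match_prefix("9X5AA") is None       # not in the toy table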

1 main.py
@@ -70,6 +70,7 @@ bot.add_cog(GlobalSettings(bot))
 bot.load_extension("cogs.basecog")
 bot.load_extension("cogs.morsecog")
 bot.load_extension("cogs.funcog")
+bot.load_extension("cogs.lookupcog")
 
 _ensure_activity.start()

requirements.txt
@@ -1 +1,3 @@
 discord.py
+feedparser
+requests

98 util/cty_json.py Executable file
@@ -0,0 +1,98 @@
#!/usr/bin/env python3
"""
Generates JSON from a CTY.DAT file

Format:
entity name: CQ Zone: ITU Zone: Continent: Latitude: Longitude: Time Zone: Primary Prefix:
    other,prefixes,and,=callsigns;
"""
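# Illustrative only, not from the commit: a record in the layout described
# above typically looks like the line pair below. The entity name and numbers
# are placeholders; alias prefixes and =callsigns on the continuation line may
# carry (CQ zone), [ITU zone], <lat/long>, {continent}, or ~time zone~ overrides.
#
#   Somewhere:                05:  08:  NA:   37.50:   91.50:    5.0:  K:
#       AA,AB(4)[7],=K1ABC;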

import re
import json
import feedparser
from datetime import datetime
import requests
import zipfile
import sys
import os


def genCtyJson():
    try:
        old_cty = json.load(open('resources/cty.json'))['last_updated']
    except Exception:
        old_cty = None
        print('Missing/Broken cty.json')

    try:
        feed = feedparser.parse('http://www.country-files.com/category/big-cty/feed/')
        updateURL = feed.entries[0]['link']
        dateStr = re.search(r'(\d{2}-\w+-\d{4})', updateURL).group(1).title()
        updateDate = datetime.strftime(datetime.strptime(dateStr, '%d-%B-%Y'), '%Y%m%d')
    except Exception:
        print('Error parsing URL or feed')
        return False  # no update date to compare against or to download

    if old_cty == updateDate:
        print('Already up-to-date')
        return False

    try:
        dlURL = f'http://www.country-files.com/bigcty/download/bigcty-{updateDate}.zip'
        r = requests.get(dlURL)
        with open('cty.zip', 'wb') as dlFile:
            dlFile.write(r.content)
        with zipfile.ZipFile('cty.zip') as ctyZip:
            try:
                ctyZip.extract('cty.dat')
            except Exception:
                print('Couldn\'t extract cty.dat')
        os.remove('cty.zip')
    except Exception:
        print('Error retrieving new cty.dat')

    with open('cty.dat') as ctyfile:
        cty = dict()

        cty['last_updated'] = updateDate

        last = ''
        while True:
            line = ctyfile.readline().rstrip('\x0D').strip(':')
            if not line:
                break
            if line != '' and line[0].isalpha():
                # Entity line: eight colon-separated fields ending in the primary prefix.
                line = [x.strip() for x in line.split(':')]
                if line[7][0] == '*':
                    # A leading '*' marks an entity that is not on the DXCC list.
                    line[7] = line[7][1:]
                    line[0] += ' (not DXCC)'
                cty[line[7]] = {'entity': line[0], 'cq': int(line[1]),
                                'itu': int(line[2]), 'continent': line[3],
                                'lat': float(line[4]), 'long': float(line[5]),
                                'tz': -1 * float(line[6]), 'len': len(line[7])}
                last = line[7]

            elif line != '' and line[0].isspace():
                # Continuation line: comma-separated alias prefixes for the entity above.
                line = line.strip().rstrip(';').rstrip(',').split(',')
                for i in line:
                    if i not in cty.keys():
                        # Copy the primary record so per-alias overrides do not
                        # leak back into it.
                        data = cty[last].copy()
                        if re.search(r'\[(\d+)\]', i):
                            data['itu'] = int(re.search(r'\[(\d+)\]', i).group(1))
                        if re.search(r'\((\d+)\)', i):
                            data['cq'] = int(re.search(r'\((\d+)\)', i).group(1))
                        if re.search(r'<(\d+)\/(\d+)>', i):
                            data['lat'] = float(re.search(r'<(\d+)/(\d+)>', i).group(1))
                            data['long'] = float(re.search(r'<(\d+)/(\d+)>', i).group(2))
                        if re.search(r'\{(\w+)\}', i):
                            data['continent'] = re.search(r'\{(\w+)\}', i).group(1)
                        if re.search(r'~(\w+)~', i):
                            data['tz'] = -1 * float(re.search(r'~(\w+)~', i).group(1))
                        prefix = re.sub(r'=?([^\(\[]*)(\(\d+\))?(\[\d+\])?(<\d+\/\d+>)?(\{\w+\})?(~\w+~)?', r'\1', i)
                        cty[prefix] = data

    with open('resources/cty.json', 'w') as cty_json:
        json.dump(cty, cty_json)

    os.remove('cty.dat')
    return True


if __name__ == '__main__':
    status = genCtyJson()
    print(status)
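For orientation, the file written to resources/cty.json is a flat object keyed by prefix, plus a `last_updated` date string. A sketch of its shape; the field names come from the parser above, while the concrete values are placeholders:

# Shape of resources/cty.json as built by genCtyJson(); values are placeholders.
example = {
    'last_updated': '20191104',
    'K': {'entity': 'Somewhere', 'cq': 5, 'itu': 8, 'continent': 'NA',
          'lat': 37.5, 'long': 91.5, 'tz': -5.0, 'len': 1},
    # alias prefixes get a copy of their entity's record, with any per-alias
    # zone/coordinate/time-zone overrides applied
}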