Follow-up
How do you get the list of instances subscribed to the relay server? See the project https://github.com/dragonfly-club/dragon-relay. That project renders the list as a custom HTML page, which isn't flexible enough, so I modified it to generate JSON data instead.
My original idea was to rebuild the Docker image from a Dockerfile, but that felt like too much trouble, so I settled for a workaround.
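The relay stores its subscribers as Redis keys of the form relay:subscription:<domain>, which is what the script below queries. As a quick sanity check you can list them directly, assuming the Redis container name used in the scheduled task at the end of this post:
docker exec activity-relay-redis-1 redis-cli KEYS 'relay:subscription:*'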
Usage
Contents of the Python script gen-member-list.py:
#!/usr/bin/python3
import logging
import requests
import base64
import json
from collections import Counter
from subprocess import Popen, PIPE
import shutil

outfile = 'output.json'
stats_file = 'stats.json'
USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/109.0 (https://relay.jiong.us)'
TIMEOUT = 4

instance_ids = set()

def setup_logging():
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.INFO)
    log_handler = logging.FileHandler('gen-member-list.log')
    log_handler.setLevel(logging.INFO)
    log_format = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
    log_handler.setFormatter(log_format)
    logger.addHandler(log_handler)
    return logger

logger = setup_logging()

def get_redis_cli_path():
    redis_cli = shutil.which('redis-cli')
    if redis_cli:
        return redis_cli
    else:
        raise FileNotFoundError("redis-cli not found in PATH")

def read_redis_keys():
    redis_cli = get_redis_cli_path()
    cmd = [redis_cli]
    cmdin = 'KEYS relay:subscription:*'.encode('utf-8')
    p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    return p.communicate(input=cmdin)[0].decode('utf-8')

def generate_instance_id(page):
    uid = []
    fields = ['uri', 'email', 'name', 'hcaptchaSiteKey']
    for field in fields:
        try:
            uid.append(str(page.get(field, '')))
        except AttributeError:
            pass

    try:
        if page.get('contact_account'):
            uid.append(str(page['contact_account'].get('id', '')))
            uid.append(str(page['contact_account'].get('username', '')))
    except AttributeError:
        pass

    return '_'.join(filter(None, uid))

def fetch_favicon(domain):
    try:
        favicon_url = f"https://{domain}/favicon.ico"
        response = requests.get(favicon_url, timeout=TIMEOUT)
        if response.status_code == 200:
            return base64.b64encode(response.content).decode('utf-8')
    except Exception as e:
        logger.warning(f"Failed to fetch favicon for {domain}: {str(e)}")
    return ""

def try_nodeinfo(headers, domain, timeout):
    nodeinfo_url = f"https://{domain}/.well-known/nodeinfo"
    response = requests.get(nodeinfo_url, headers=headers, timeout=timeout)
    nodeinfo_link = response.json()['links'][0]['href']

    response = requests.get(nodeinfo_link, headers=headers, timeout=timeout)
    nodeinfo = response.json()

    software = nodeinfo['software']['name']
    version = nodeinfo['software']['version']
    stats = nodeinfo['usage']

    fav_md = fetch_favicon(domain)
    title = nodeinfo["metadata"].get("nodeName", domain.split('.')[0].capitalize())

    uid = generate_instance_id(nodeinfo)

    json_line = {
        'favicon': fav_md,
        'title': title,
        'domain': domain,
        'users': stats['users']['total'],
        'posts': stats.get('localPosts', 0),
        'software': software,
        'version': version,
        'instances': nodeinfo.get('metadata', {}).get('federation', {}).get('domainCount', 0)
    }

    return json_line, uid

def try_mastodon(headers, domain, timeout):
    instance_url = f"https://{domain}/api/v1/instance"
    response = requests.get(instance_url, headers=headers, timeout=timeout)
    instance_info = response.json()

    stats_url = f"https://{domain}/api/v1/instance/peers"
    response = requests.get(stats_url, headers=headers, timeout=timeout)
    peers = response.json()

    fav_md = fetch_favicon(domain)
    title = instance_info['title']
    version = instance_info['version']

    uid = generate_instance_id(instance_info)

    json_line = {
        'favicon': fav_md,
        'title': title,
        'domain': domain,
        'users': instance_info['stats']['user_count'],
        'statuses': instance_info['stats']['status_count'],
        'instances': len(peers),
        'version': version,
        'software': 'mastodon'
    }

    return json_line, uid

def try_misskey(headers, domain, timeout):
    meta_url = f"https://{domain}/api/meta"
    response = requests.post(meta_url, headers=headers, timeout=timeout)
    meta_info = response.json()

    stats_url = f"https://{domain}/api/stats"
    response = requests.post(stats_url, headers=headers, timeout=timeout)
    stats = response.json()

    fav_md = fetch_favicon(domain)
    title = meta_info['name']
    version = meta_info['version']

    uid = generate_instance_id(meta_info)

    json_line = {
        'favicon': fav_md,
        'title': title,
        'domain': domain,
        'users': stats['originalUsersCount'],
        'notes': stats['originalNotesCount'],
        'instances': stats['instances'],
        'version': version,
        'software': 'misskey'
    }

    return json_line, uid

def generate_list():
    json_list = []
    all_domains = [line.split('subscription:')[-1]
                   for line in read_redis_keys().split('\n')
                   if line and 'subscription' in line]
    logger.info(f"Total domains from Redis: {len(all_domains)}")

    success_count = 0
    failure_count = 0
    software_counter = Counter()
    interaction_stats = {}

    for domain in all_domains:
        logger.info(f"Processing domain: {domain}")

        headers = {
            'User-Agent': USER_AGENT
        }

        json_line = {'domain': domain, 'status': 'Stats Unavailable'}
        uid = None
        success = False

        # try the Mastodon API, then Misskey, then generic NodeInfo, until one of them works
        for try_function in [try_mastodon, try_misskey, try_nodeinfo]:
            try:
                json_line, uid = try_function(headers, domain, TIMEOUT)
                logger.info(f"Successfully fetched stats for {domain} using {try_function.__name__}")
                success = True
                break
            except Exception as e:
                logger.warning(f"Failed to fetch stats for {domain} using {try_function.__name__}: {str(e)}")

        if success:
            success_count += 1
            software_counter[json_line.get('software', 'Unknown')] += 1

            interaction_count = json_line.get('statuses', 0) or json_line.get('notes', 0) or json_line.get('posts', 0)
            interaction_stats[domain] = interaction_count

            logger.info(f"Instances count for {domain}: {json_line.get('instances', 0)}")
        else:
            failure_count += 1

        # skip domains that resolve to an instance we have already listed
        if uid and uid in instance_ids:
            logger.info(f"Skipped duplicate domain {domain} with uid {uid}")
            continue

        if uid:
            instance_ids.add(uid)

        json_list.append(json_line)
        logger.info(f"Added {domain} to the list")

    logger.info(f"Total instances processed: {len(json_list)}")
    logger.info(f"Successful instances: {success_count}")
    logger.info(f"Failed instances: {failure_count}")
    logger.info(f"Software distribution: {dict(software_counter)}")

    json_list.sort(key=lambda x: x.get('users', 0), reverse=True)

    stats = {
        "total_instances": len(json_list),
        "successful_instances": success_count,
        "failed_instances": failure_count,
        "software_distribution": dict(software_counter),
        "interaction_stats": interaction_stats
    }

    with open(stats_file, 'w') as f:
        json.dump(stats, f, indent=2)

    return json_list

if __name__ == "__main__":
    logger.info('Started generating member list.')
    sub_list = generate_list()
    with open(outfile, 'w') as f:
        json.dump(sub_list, f, indent=2)
    logger.info('Write new page template done.')
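After a run, output.json is a JSON array sorted by user count, one object per instance (domain, title, users, software, version, instances, plus statuses/notes/posts depending on the software; failed instances only carry domain and status). A quick way to eyeball it, purely illustrative and assuming jq is available on the host:
jq -r '.[] | select(.users != null) | "\(.domain)\t\(.users)\t\(.software)"' output.json | head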
Bash script
Contents of update-list.sh:
#!/bin/sh
if [ ! -f /tmp/setup_done ]; then            # skip setup if the marker exists; restarting the container reinstalls the dependencies
    apk add python3 py3-pip py3-requests     # install Python and requests
    apk add tzdata                           # fix the timezone
    cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
    echo "Asia/Shanghai" > /etc/timezone
    touch /tmp/setup_done
fi
cd /relay/
./gen-member-list.py || exit 1;
exit 0;
Modify docker-compose.yaml
This step maps the local script folder into the Docker container (the "./relay:/relay" volume on the redis service):
services:
  redis:
    restart: always
    image: redis:alpine
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
    volumes:
      - "./redisdata:/data"
      - "./relay:/relay"

  worker:
    container_name: worker
    build: .
    image: yukimochi/activity-relay
    working_dir: /var/lib/relay
    restart: always
    init: true
    command: relay worker
    volumes:
      - "./actor.pem:/var/lib/relay/actor.pem"
      - "./config.yml:/var/lib/relay/config.yml"
    depends_on:
      - redis

  server:
    container_name: relay
    build: .
    image: yukimochi/activity-relay
    working_dir: /var/lib/relay
    restart: always
    init: true
    ports:
      - "8080:8080"
    command: relay server
    volumes:
      - "./actor.pem:/var/lib/relay/actor.pem"
      - "./config.yml:/var/lib/relay/config.yml"
    depends_on:
      - redis
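After changing the compose file, the containers have to be recreated so the new ./relay volume is actually mounted, for example:
docker compose up -d    # or docker-compose up -d with the older CLI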
Then drop both of the scripts above into the relay folder (the ./relay directory mapped into the container).
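On the host that could look roughly like this (paths are assumptions; docker-compose.yaml is taken to live in the current directory, and chmod +x matters because update-list.sh runs ./gen-member-list.py directly):
mkdir -p relay
cp gen-member-list.py update-list.sh relay/
chmod +x relay/gen-member-list.py relay/update-list.sh
docker exec activity-relay-redis-1 ls /relay    # verify the mount; container name matches the scheduled task below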
Scheduled task
Use BT Panel (宝塔) or the system's scheduled tasks to run the following command, where activity-relay-redis-1 is the name of the Redis container:
docker exec activity-relay-redis-1 /bin/sh /relay/update-list.sh
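For plain cron, an entry in the host's crontab along these lines would do; the hourly schedule is just an example:
0 * * * * docker exec activity-relay-redis-1 /bin/sh /relay/update-list.sh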