__filename__ = "daemon.py"
__author__ = "Bob Mottram"
__license__ = "AGPL3+"
__version__ = "1.5.0"
__maintainer__ = "Bob Mottram"
__email__ = "[email protected]"
__status__ = "Production"
__module_group__ = "Core"
from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer, HTTPServer
import sys
import time
import os
from socket import error as SocketError
import errno
from functools import partial
# for saving images
from metadata import metadata_custom_emoji
from person import update_memorial_flags
from person import clear_person_qrcodes
from person import create_shared_inbox
from person import create_news_inbox
from keys import get_instance_actor_key
from posts import expire_cache
from inbox import run_inbox_queue
from inbox import run_inbox_queue_watchdog
from follow import create_initial_last_seen
from threads import begin_thread
from threads import thread_with_trace
from threads import remove_dormant_threads
from cwlists import load_cw_lists
from blocking import run_federated_blocks_daemon
from blocking import load_federated_blocks_endpoints
from blocking import load_blocked_military
from blocking import load_blocked_government
from blocking import load_blocked_bluesky
from blocking import update_blocked_cache
from blocking import set_broch_mode
from blocking import get_domain_blocklist
from webapp_utils import load_buy_sites
from webapp_accesskeys import load_access_keys_for_accounts
from webapp_media import load_peertube_instances
from shares import run_federated_shares_daemon
from shares import run_federated_shares_watchdog
from shares import create_shared_item_federation_token
from shares import generate_shared_item_federation_tokens
from shares import expire_shares
from categories import load_city_hashtags
from categories import update_hashtag_categories
from languages import load_default_post_languages
from utils import load_searchable_by_default
from utils import set_accounts_data_dir
from utils import data_dir
from utils import check_bad_path
from utils import acct_handle_dir
from utils import load_reverse_timeline
from utils import load_min_images_for_accounts
from utils import load_account_timezones
from utils import load_translations_from_file
from utils import load_bold_reading
from utils import load_hide_follows
from utils import get_full_domain
from utils import set_config_param
from utils import get_config_param
from utils import load_json
from utils import load_mitm_servers
from content import load_auto_cw_cache
from content import load_dogwhistles
from theme import scan_themes_for_scripts
from theme import is_news_theme_name
from theme import get_text_mode_banner
from theme import set_news_avatar
from schedule import run_post_schedule
from schedule import run_post_schedule_watchdog
from happening import dav_propfind_response
from happening import dav_put_response
from happening import dav_report_response
from happening import dav_delete_response
from newswire import load_hashtag_categories
from newsdaemon import run_newswire_watchdog
from newsdaemon import run_newswire_daemon
from fitnessFunctions import fitness_thread
from siteactive import load_unavailable_sites
from crawlers import load_known_web_bots
from qrcode import save_domain_qrcode
from importFollowing import run_import_following_watchdog
from relationships import update_moved_actors
from daemon_get import daemon_http_get
from daemon_post import daemon_http_post
from daemon_head import daemon_http_head
from httpcodes import http_200
from httpcodes import http_201
from httpcodes import http_207
from httpcodes import http_403
from httpcodes import http_404
from httpcodes import http_304
from httpcodes import http_400
from httpcodes import write2
from httpheaders import set_headers
from daemon_utils import load_known_epicyon_instances
from daemon_utils import has_accept
from daemon_utils import is_authorized
from poison import load_dictionary
from poison import load_2grams


class PubServer(BaseHTTPRequestHandler):
    protocol_version = 'HTTP/1.1'

    def handle_error(self, request, client_address):
        """HTTP server error handling
        """
        print('ERROR: http server error: ' + str(request) + ', ' +
              str(client_address))

    def do_GET(self):
        daemon_http_get(self)

    def _dav_handler(self, endpoint_type: str, debug: bool):
        calling_domain = self.server.domain_full
        if not has_accept(self, calling_domain):
            http_400(self)
            return
        accept_str = self.headers['Accept']
        if 'application/xml' not in accept_str:
            if debug:
                print(endpoint_type.upper() + ' is not of xml type')
            http_400(self)
            return
        if not self.headers.get('Content-length'):
            print(endpoint_type.upper() + ' has no content-length')
            http_400(self)
            return
        # check that the content length string is not too long
        if isinstance(self.headers['Content-length'], str):
            max_content_size = len(str(self.server.maxMessageLength))
            if len(self.headers['Content-length']) > max_content_size:
                http_400(self)
                return
        length = int(self.headers['Content-length'])
        if length > self.server.max_post_length:
            print(endpoint_type.upper() +
                  ' request size too large ' + self.path)
            http_400(self)
            return
        if not self.path.startswith('/calendars/'):
            print(endpoint_type.upper() + ' without /calendars ' + self.path)
            http_404(self, 145)
            return
        if debug:
            print(endpoint_type.upper() + ' checking authorization')
        if not is_authorized(self):
            print(endpoint_type.upper() + ' not authorized')
            http_403(self)
            return
        nickname = self.path.split('/calendars/')[1]
        if '/' in nickname:
            nickname = nickname.split('/')[0]
        if not nickname:
            print(endpoint_type.upper() + ' no nickname ' + self.path)
            http_400(self)
            return
        dir_str = data_dir(self.server.base_dir)
        if not os.path.isdir(dir_str + '/' +
                             nickname + '@' + self.server.domain):
            print(endpoint_type.upper() +
                  ' for non-existent account ' + self.path)
            http_404(self, 146)
            return
        propfind_bytes = None
        try:
            propfind_bytes = self.rfile.read(length)
        except SocketError as ex:
            if ex.errno == errno.ECONNRESET:
                print('EX: ' + endpoint_type.upper() +
                      ' connection reset by peer')
            else:
                print('EX: ' + endpoint_type.upper() + ' socket error')
            http_400(self)
            return
        except ValueError as ex:
            print('EX: ' + endpoint_type.upper() +
                  ' rfile.read failed, ' + str(ex))
            http_400(self)
            return
        if not propfind_bytes:
            http_404(self, 147)
            return
        propfind_xml = propfind_bytes.decode('utf-8')
        response_str = None
        if endpoint_type == 'propfind':
            response_str = \
                dav_propfind_response(nickname, propfind_xml)
        elif endpoint_type == 'put':
            response_str = \
                dav_put_response(self.server.base_dir,
                                 nickname, self.server.domain,
                                 propfind_xml,
                                 self.server.http_prefix,
                                 self.server.system_language,
                                 self.server.recent_dav_etags)
        elif endpoint_type == 'report':
            curr_etag = None
            if self.headers.get('ETag'):
                curr_etag = self.headers['ETag']
            elif self.headers.get('Etag'):
                curr_etag = self.headers['Etag']
            response_str = \
                dav_report_response(self.server.base_dir,
                                    nickname, self.server.domain,
                                    propfind_xml,
                                    self.server.person_cache,
                                    self.server.http_prefix,
                                    curr_etag,
                                    self.server.recent_dav_etags,
                                    self.server.domain_full,
                                    self.server.system_language)
        elif endpoint_type == 'delete':
            response_str = \
                dav_delete_response(self.server.base_dir,
                                    nickname, self.server.domain,
                                    self.path,
                                    self.server.http_prefix,
                                    debug,
                                    self.server.recent_posts_cache)
        if not response_str:
            http_404(self, 148)
            return
        if response_str == 'Not modified':
            if endpoint_type == 'put':
                http_200(self)
                return
            http_304(self)
            return
        if response_str.startswith('ETag:') and endpoint_type == 'put':
            response_etag = response_str.split('ETag:', 1)[1]
            http_201(self, response_etag)
        elif response_str != 'Ok':
            message_xml = response_str.encode('utf-8')
            message_xml_len = len(message_xml)
            set_headers(self, 'application/xml; charset=utf-8',
                        message_xml_len,
                        None, calling_domain, False)
            write2(self, message_xml)
            if 'multistatus' in response_str:
                return http_207(self)
            http_200(self)
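
    # Illustrative request shape accepted by _dav_handler above. The
    # nickname, domain and credentials are hypothetical, not taken from
    # this file:
    #
    #   PROPFIND /calendars/alice HTTP/1.1
    #   Host: example.com
    #   Accept: application/xml
    #   Content-length: 122
    #   Authorization: Basic ...
    #
    #   <?xml version="1.0" encoding="utf-8"?>
    #   <propfind xmlns="DAV:"><prop/></propfind>
    #
    # Requests without an xml Accept header or Content-length, or with a
    # path outside /calendars/, are rejected with 400/404 before any body
    # is read.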

    def do_PROPFIND(self):
        if self.server.starting_daemon:
            return
        if check_bad_path(self.path):
            http_400(self)
            return
        self._dav_handler('propfind', self.server.debug)

    def do_PUT(self):
        if self.server.starting_daemon:
            return
        if check_bad_path(self.path):
            http_400(self)
            return
        self._dav_handler('put', self.server.debug)

    def do_REPORT(self):
        if self.server.starting_daemon:
            return
        if check_bad_path(self.path):
            http_400(self)
            return
        self._dav_handler('report', self.server.debug)

    def do_DELETE(self):
        if self.server.starting_daemon:
            return
        if check_bad_path(self.path):
            http_400(self)
            return
        self._dav_handler('delete', self.server.debug)

    def do_HEAD(self):
        daemon_http_head(self)

    def do_POST(self):
        daemon_http_post(self)


class PubServerUnitTest(PubServer):
    protocol_version = 'HTTP/1.0'


class EpicyonServer(ThreadingHTTPServer):
    starting_daemon = True
    hide_announces = {}
    no_of_books = 0
    max_api_blocks = 32000
    block_federated_endpoints = None
    block_federated = []
    books_cache = {}
    max_recent_books = 1000
    max_cached_readers = 24
    auto_cw_cache = {}
    sites_unavailable = None
    max_shares_on_profile = 0
    block_military = {}
    block_government = {}
    block_bluesky = {}
    followers_synchronization = False
    followers_sync_cache = {}
    buy_sites = None
    min_images_for_accounts = 0
    default_post_language = None
    css_cache = {}
    reverse_sequence = None
    clacks = None
    public_replies_unlisted = False
    dogwhistles = {}
    preferred_podcast_formats = []
    bold_reading = {}
    hide_follows = {}
    account_timezone = None
    post_to_nickname = None
    nodeinfo_is_active = False
    security_txt_is_active = False
    vcard_is_active = False
    masto_api_is_active = False
    map_format = None
    dyslexic_font = False
    content_license_url = ''
    dm_license_url = ''
    fitness = {}
    show_node_info_accounts = False
    show_node_info_version = False
    text_mode_banner = ''
    access_keys = {}
    rss_timeout_sec = 20
    check_actor_timeout = 2
    default_reply_interval_hrs = 9999999
    recent_dav_etags = {}
    key_shortcuts = {}
    low_bandwidth = False
    user_agents_blocked = None
    crawlers_allowed = None
    known_bots = None
    unit_test = False
    allow_local_network_access = False
    yt_replace_domain = ''
    twitter_replacement_domain = ''
    newswire = {}
    max_newswire_posts = 0
    verify_all_signatures = False
    blocklistUpdateCtr = 0
    blocklistUpdateInterval = 100
    domainBlocklist = None
    manual_follower_approval = True
    onion_domain = None
    i2p_domain = None
    media_instance = False
    blogs_instance = False
    translate = {}
    system_language = 'en'
    city = ''
    voting_time_mins = 30
    positive_voting = False
    newswire_votes_threshold = 1
    max_newswire_feed_size_kb = 1
    max_newswire_posts_per_source = 1
    show_published_date_only = False
    max_mirrored_articles = 0
    max_news_posts = 0
    maxTags = 32
    max_followers = 2000
    show_publish_as_icon = False
    full_width_tl_button_header = False
    icons_as_buttons = False
    rss_icon_at_top = True
    publish_button_at_top = False
    max_feed_item_size_kb = 100
    maxCategoriesFeedItemSizeKb = 1024
    dormant_months = 6
    max_like_count = 10
    followingItemsPerPage = 12
    registration = False
    enable_shared_inbox = True
    outboxThread = {}
    outbox_thread_index = {}
    new_post_thread = {}
    project_version = __version__
    secure_mode = True
    max_post_length = 0
    maxMediaSize = 0
    maxMessageLength = 64000
    maxPostsInBox = 32000
    maxCacheAgeDays = 30
    domain = ''
    port = 43
    domain_full = ''
    http_prefix = 'https'
    debug = False
    federation_list = []
    shared_items_federated_domains = []
    base_dir = ''
    instance_id = ''
    person_cache = {}
    cached_webfingers = {}
    favicons_cache = {}
    proxy_type = None
    session = None
    session_onion = None
    session_i2p = None
    last_getreq = 0
    last_postreq = 0
    getreq_busy = False
    postreq_busy = False
    received_message = False
    inbox_queue = []
    send_threads = None
    postLog = []
    max_queue_length = 64
    allow_deletion = True
    last_login_time = 0
    last_login_failure = 0
    login_failure_count = {}
    log_login_failures = True
    max_replies = 10
    tokens = {}
    tokens_lookup = {}
    instance_only_skills_search = True
    followers_threads = []
    blocked_cache = []
    blocked_cache_last_updated = 0
    blocked_cache_update_secs = 120
    custom_emoji = {}
    known_crawlers = {}
    last_known_crawler = 0
    lists_enabled = None
    cw_lists = {}
    theme_name = ''
    news_instance = False
    default_timeline = 'inbox'
    thrFitness = None
    recent_posts_cache = {}
    thrCache = None
    send_threads_timeout_mins = 3
    thrPostsQueue = None
    thrPostsWatchdog = None
    thrSharesExpire = None
    thrSharesExpireWatchdog = None
    max_recent_posts = 1
    iconsCache = {}
    fontsCache = {}
    shared_item_federation_tokens = None
    peertube_instances = []
    max_mentions = 10
    max_emoji = 10
    max_hashtags = 10
    thrInboxQueue = None
    thrPostSchedule = None
    thrNewswireDaemon = None
    thrFederatedSharesDaemon = None
    restart_inbox_queue_in_progress = False
    restart_inbox_queue = False
    signing_priv_key_pem = ''
    thrCheckActor = {}
    thrImportFollowing = None
    thrWatchdog = None
    thrWatchdogSchedule = None
    thrNewswireWatchdog = None
    thrFederatedSharesWatchdog = None
    thrFederatedBlocksDaemon = None
    qrcode_scale = 6
    instance_description = ''
    instance_description_short = 'Epicyon'
    robots_txt = None
    last_llm_time = None
    mitm_servers = []
    watermark_width_percent = 0
    watermark_position = 0
    watermark_opacity = 0
    headers_catalog = {}
    dictionary = []
    twograms = {}
    searchable_by_default = {}
    known_epicyon_instances = []

    def handle_error(self, request, client_address):
        # suppress connection reset errors
        cls, e_ret = sys.exc_info()[:2]
        if cls is ConnectionResetError:
            if e_ret.errno != errno.ECONNRESET:
                print('ERROR: (EpicyonServer) ' +
                      str(cls) + ", " + str(e_ret))
        elif cls is BrokenPipeError:
            pass
        else:
            print('ERROR: (EpicyonServer) ' + str(cls) + ", " + str(e_ret))
            return HTTPServer.handle_error(self, request, client_address)
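

# A minimal sketch of how the classes above fit together, for orientation
# only; run_daemon below performs the real setup, and the address values
# here are hypothetical:
#
#   handler = partial(PubServer)
#   httpd = EpicyonServer(('127.0.0.1', 7156), handler)
#   httpd.starting_daemon = False   # do_* handlers return early while True
#   httpd.serve_forever()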


def run_posts_queue(base_dir: str, send_threads: [], debug: bool,
                    timeout_mins: int) -> None:
    """Manages the threads used to send posts
    """
    while True:
        time.sleep(1)
        remove_dormant_threads(base_dir, send_threads, debug, timeout_mins)


def run_shares_expire(version_number: str, base_dir: str, httpd) -> None:
    """Expires shares as needed
    """
    while True:
        time.sleep(120)
        expire_shares(base_dir, httpd.max_shares_on_profile,
                      httpd.person_cache)


def run_posts_watchdog(project_version: str, httpd) -> None:
    """This tries to keep the posts thread running even if it dies
    """
    print('THREAD: Starting posts queue watchdog')
    posts_queue_original = httpd.thrPostsQueue.clone(run_posts_queue)
    begin_thread(httpd.thrPostsQueue, 'run_posts_watchdog')
    while True:
        time.sleep(20)
        if httpd.thrPostsQueue.is_alive():
            continue
        httpd.thrPostsQueue.kill()
        print('THREAD: restarting posts queue')
        httpd.thrPostsQueue = posts_queue_original.clone(run_posts_queue)
        begin_thread(httpd.thrPostsQueue, 'run_posts_watchdog 2')
        print('Restarting posts queue...')
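

# The watchdog above relies on the clone() and kill() methods of
# thread_with_trace (imported from threads): a template thread is cloned
# to create the worker, the watchdog polls is_alive() every 20 seconds,
# and a dead worker is killed and re-cloned. run_shares_expire_watchdog
# below applies the same pattern to the shares expiry thread.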


def run_shares_expire_watchdog(project_version: str, httpd) -> None:
    """This tries to keep the shares expiry thread running even if it dies
    """
    print('THREAD: Starting shares expiry watchdog')
    shares_expire_original = httpd.thrSharesExpire.clone(run_shares_expire)
    begin_thread(httpd.thrSharesExpire, 'run_shares_expire_watchdog')
    while True:
        time.sleep(20)
        if httpd.thrSharesExpire.is_alive():
            continue
        httpd.thrSharesExpire.kill()
        print('THREAD: restarting shares watchdog')
        httpd.thrSharesExpire = shares_expire_original.clone(run_shares_expire)
        begin_thread(httpd.thrSharesExpire, 'run_shares_expire_watchdog 2')
        print('Restarting shares expiry...')


def load_tokens(base_dir: str, tokens_dict: {}, tokens_lookup: {}) -> None:
    """Loads shared items access tokens for each account
    """
    dir_str = data_dir(base_dir)
    for _, dirs, _ in os.walk(dir_str):
        for handle in dirs:
            if '@' in handle:
                token_filename = acct_handle_dir(base_dir, handle) + '/.token'
                if not os.path.isfile(token_filename):
                    continue
                nickname = handle.split('@')[0]
                token = None
                try:
                    with open(token_filename, 'r',
                              encoding='utf-8') as fp_tok:
                        token = fp_tok.read()
                except OSError as ex:
                    print('WARN: Unable to read token for ' +
                          nickname + ' ' + str(ex))
                if not token:
                    continue
                tokens_dict[nickname] = token
                tokens_lookup[token] = nickname
        break
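

# Illustrative on-disk layout assumed by load_tokens, with a hypothetical
# account handle: a file such as
#
#   <data_dir>/alice@example.com/.token
#
# would produce tokens_dict['alice'] = <token contents> and
# tokens_lookup[<token contents>] = 'alice'.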


def run_daemon(accounts_data_dir: str,
               no_of_books: int,
               public_replies_unlisted: bool,
               max_shares_on_profile: int,
               max_hashtags: int,
               map_format: str,
               clacks: str,
               preferred_podcast_formats: [],
               check_actor_timeout: int,
               crawlers_allowed: [],
               dyslexic_font: bool,
               content_license_url: str,
               lists_enabled: str,
               default_reply_interval_hrs: int,
               low_bandwidth: bool,
               max_like_count: int,
               shared_items_federated_domains: [],
               user_agents_blocked: [],
               log_login_failures: bool,
               city: str,
               show_node_info_accounts: bool,
               show_node_info_version: bool,
               broch_mode: bool,
               verify_all_signatures: bool,
               send_threads_timeout_mins: int,
               dormant_months: int,
               max_newswire_posts: int,
               allow_local_network_access: bool,
               max_feed_item_size_kb: int,
               publish_button_at_top: bool,
               rss_icon_at_top: bool,
               icons_as_buttons: bool,
               full_width_tl_button_header: bool,
               show_publish_as_icon: bool,
               max_followers: int,
               max_news_posts: int,
               max_mirrored_articles: int,
               max_newswire_feed_size_kb: int,
               max_newswire_posts_per_source: int,
               show_published_date_only: bool,
               voting_time_mins: int,
               positive_voting: bool,
               newswire_votes_threshold: int,
               news_instance: bool,
               blogs_instance: bool,
               media_instance: bool,
               max_recent_posts: int,
               enable_shared_inbox: bool,
               registration: bool,
               language: str,
               project_version: str,
               instance_id: str,
               client_to_server: bool,
               base_dir: str,
               domain: str,
               onion_domain: str,
               i2p_domain: str,
               yt_replace_domain: str,
               twitter_replacement_domain: str,
               port: int,
               proxy_port: int,
               http_prefix: str,
               fed_list: [],
               max_mentions: int,
               max_emoji: int,
               secure_mode: bool,
               proxy_type: str,
               max_replies: int,
               domain_max_posts_per_day: int,
               account_max_posts_per_day: int,
               allow_deletion: bool,
               debug: bool,
               unit_test: bool,
               instance_only_skills_search: bool,
               send_threads: [],
               manual_follower_approval: bool,
               watermark_width_percent: int,
               watermark_position: str,
               watermark_opacity: int,
               bind_to_ip_address: str) -> None:
    if len(domain) == 0:
        domain = 'localhost'
    if '.' not in domain:
        if domain != 'localhost':
            print('Invalid domain: ' + domain)
            return

    update_moved_actors(base_dir, debug)

    if unit_test:
        server_address = (domain, proxy_port)
        pub_handler = partial(PubServerUnitTest)
    else:
        if not bind_to_ip_address:
            server_address = ('', proxy_port)
        else:
            server_address = (bind_to_ip_address, proxy_port)
        pub_handler = partial(PubServer)

    if accounts_data_dir:
        set_accounts_data_dir(base_dir, accounts_data_dir)
    dir_str = data_dir(base_dir)
    if not os.path.isdir(dir_str):
        print('Creating accounts directory')
        os.mkdir(dir_str)

    httpd = None
    try:
        httpd = EpicyonServer(server_address, pub_handler)
    except SocketError as ex:
        if ex.errno == errno.ECONNREFUSED:
            print('EX: HTTP server address is already in use. ' +
                  str(server_address))
            return False
        print('EX: HTTP server failed to start. ' + str(ex))
        print('server_address: ' + str(server_address))
        return False
    if not httpd:
        print('Unable to start daemon')
        return False
    httpd.starting_daemon = True
    # the last time when an LLM scraper was replied to
    httpd.last_llm_time = None
    # servers with man-in-the-middle transport encryption
    httpd.mitm_servers = load_mitm_servers(base_dir)
    # default "searchable by" for new posts for each account
    httpd.searchable_by_default = load_searchable_by_default(base_dir)
    # load the list of known Epicyon instances
    httpd.known_epicyon_instances = load_known_epicyon_instances(base_dir)
    # if a custom robots.txt exists then read it
    robots_txt_filename = data_dir(base_dir) + '/robots.txt'
    httpd.robots_txt = None
    if os.path.isfile(robots_txt_filename):
        new_robots_txt = ''
        try:
            with open(robots_txt_filename, 'r',
                      encoding='utf-8') as fp_robots:
                new_robots_txt = fp_robots.read()
        except OSError:
            print('EX: error reading 1 ' + robots_txt_filename)
        if new_robots_txt:
            httpd.robots_txt = new_robots_txt
    # width, position and opacity of watermark applied to attached images
    # as a percentage of the attached image width
    httpd.watermark_width_percent = watermark_width_percent
    httpd.watermark_position = watermark_position
    httpd.watermark_opacity = watermark_opacity
    # for each account whether to hide announces
    httpd.hide_announces = {}
    hide_announces_filename = data_dir(base_dir) + '/hide_announces.json'
    if os.path.isfile(hide_announces_filename):
        httpd.hide_announces = load_json(hide_announces_filename)
    # short description of the instance
    httpd.instance_description_short = \
        get_config_param(base_dir, 'instanceDescriptionShort')
    if httpd.instance_description_short is None:
        httpd.instance_description_short = 'Epicyon'
    # description of the instance
    httpd.instance_description = \
        get_config_param(base_dir, 'instanceDescription')
    if httpd.instance_description is None:
        httpd.instance_description = ''
    # number of book events which show on profile screens
    httpd.no_of_books = no_of_books
    # initialise federated blocklists
    httpd.max_api_blocks = 32000
    httpd.block_federated_endpoints = \
        load_federated_blocks_endpoints(base_dir)
    httpd.block_federated = []
    # cache storing recent book events
    httpd.books_cache = {}
    httpd.max_recent_books = 1000
    httpd.max_cached_readers = 24
    # cache for automatic content warnings
    httpd.auto_cw_cache = load_auto_cw_cache(base_dir)
    # loads a catalog of http header fields
    headers_catalog_filename = data_dir(base_dir) + '/headers_catalog.json'
    httpd.headers_catalog = {}
    if os.path.isfile(headers_catalog_filename):
        httpd.headers_catalog = load_json(headers_catalog_filename)
    # list of websites which are currently down
    httpd.sites_unavailable = load_unavailable_sites(base_dir)
    # maximum number of shared items attached to actors, as in
    # https://codeberg.org/fediverse/fep/src/branch/main/fep/0837/fep-0837.md
    httpd.max_shares_on_profile = max_shares_on_profile
    # load a list of nicknames for accounts blocking military instances
    httpd.block_military = load_blocked_military(base_dir)
    # load a list of nicknames for accounts blocking government instances
    httpd.block_government = load_blocked_government(base_dir)
    # load a list of nicknames for accounts blocking bluesky bridges
    httpd.block_bluesky = load_blocked_bluesky(base_dir)
    # scan the theme directory for any svg files containing scripts
    assert not scan_themes_for_scripts(base_dir)
    # lock for followers synchronization
    httpd.followers_synchronization = False
    # cache containing followers synchronization hashes and json
    httpd.followers_sync_cache = {}
    # permitted sites from which the buy button may be displayed
    httpd.buy_sites = load_buy_sites(base_dir)
    # which accounts should minimize all attached images by default
    httpd.min_images_for_accounts = load_min_images_for_accounts(base_dir)
    # default language for each account when creating a new post
    httpd.default_post_language = load_default_post_languages(base_dir)
    # caches css files
    httpd.css_cache = {}
    httpd.reverse_sequence = load_reverse_timeline(base_dir)
    httpd.clacks = get_config_param(base_dir, 'clacks')
    if not httpd.clacks:
        if clacks:
            httpd.clacks = clacks
        else:
            httpd.clacks = 'GNU Natalie Nguyen'
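    # The clacks string is conventionally emitted as an X-Clacks-Overhead
    # HTTP response header; the header itself is assumed to be added where
    # responses are constructed, outside this section.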
    httpd.public_replies_unlisted = public_replies_unlisted
    # load a list of dogwhistle words
    dogwhistles_filename = data_dir(base_dir) + '/dogwhistles.txt'
    if not os.path.isfile(dogwhistles_filename):
        dogwhistles_filename = base_dir + '/default_dogwhistles.txt'
    httpd.dogwhistles = load_dogwhistles(dogwhistles_filename)
    # list of preferred podcast formats
    # eg ['audio/opus', 'audio/mp3', 'audio/speex']
    httpd.preferred_podcast_formats = preferred_podcast_formats
    # for each account, whether bold reading is enabled
    httpd.bold_reading = load_bold_reading(base_dir)
    # whether to hide follows on profile screen for each account
    httpd.hide_follows = load_hide_follows(base_dir)
    httpd.account_timezone = load_account_timezones(base_dir)
    httpd.post_to_nickname = None
    httpd.nodeinfo_is_active = False
    httpd.security_txt_is_active = False
    httpd.vcard_is_active = False
    httpd.masto_api_is_active = False
    # use kml or gpx format for hashtag maps
    httpd.map_format = map_format.lower()
    httpd.dyslexic_font = dyslexic_font
    # license for content of the instance
    if not content_license_url:
        content_license_url = 'https://creativecommons.org/licenses/by-nc/4.0'
    httpd.content_license_url = content_license_url
    httpd.dm_license_url = ''
    # fitness metrics
    fitness_filename = data_dir(base_dir) + '/fitness.json'
    httpd.fitness = {}
    if os.path.isfile(fitness_filename):
        fitness = load_json(fitness_filename)
        if fitness is not None:
            httpd.fitness = fitness
    # initialize authorized fetch key
    httpd.signing_priv_key_pem = None
    httpd.show_node_info_accounts = show_node_info_accounts
    httpd.show_node_info_version = show_node_info_version
    # ASCII/ANSI text banner used in shell browsers, such as Lynx
    httpd.text_mode_banner = get_text_mode_banner(base_dir)
    # key shortcuts SHIFT + ALT + [key]
    httpd.access_keys = {
        'Page up': ',',
        'Page down': '.',
        'submitButton': 'y',
        'followButton': 'f',
        'moveButton': 'm',
        'blockButton': 'b',
        'infoButton': 'i',
        'snoozeButton': 's',
        'reportButton': '[',
        'viewButton': 'v',
        'unblockButton': 'u',
        'enterPetname': 'p',
        'enterNotes': 'n',
        'menuTimeline': 't',
        'menuEdit': 'e',
        'menuThemeDesigner': 'z',
        'menuProfile': 'p',
        'menuInbox': 'i',
        'menuSearch': '/',
        'menuNewPost': 'n',
        'menuNewBlog': '0',
        'menuCalendar': 'c',
        'menuDM': 'd',
        'menuReplies': 'r',
        'menuOutbox': 's',
        'menuBookmarks': 'q',
        'menuShares': 'h',
        'menuWanted': 'w',
        'menuReadingStatus': '=',
        'menuBlogs': 'b',
        'menuNewswire': '#',
        'menuLinks': 'l',
        'menuMedia': 'm',
        'menuModeration': 'o',
        'menuFollowing': 'f',
        'menuFollowers': 'g',
        'menuRoles': 'o',
        'menuSkills': 'a',
        'menuLogout': 'x',
        'menuKeys': 'k',
        'Public': 'p',
        'Reminder': 'r'
    }
    # timeout used when getting rss feeds
    httpd.rss_timeout_sec = 20
    # load dictionary used for LLM poisoning
    httpd.dictionary = load_dictionary(base_dir)
    httpd.twograms = load_2grams(base_dir)
    # timeout used when checking for actor changes when clicking an avatar
    # and entering person options screen
    if check_actor_timeout < 2:
        check_actor_timeout = 2
    httpd.check_actor_timeout = check_actor_timeout
    # how many hours after a post was published can a reply be made
    default_reply_interval_hrs = 9999999
    httpd.default_reply_interval_hrs = default_reply_interval_hrs
    # recent caldav etags for each account
    httpd.recent_dav_etags = {}
    httpd.key_shortcuts = {}
    load_access_keys_for_accounts(base_dir, httpd.key_shortcuts,
                                  httpd.access_keys)
    # whether to use low bandwidth images
    httpd.low_bandwidth = low_bandwidth
    # list of blocked user agent types within the User-Agent header
    httpd.user_agents_blocked = user_agents_blocked
    # list of crawler bots permitted within the User-Agent header
    httpd.crawlers_allowed = crawlers_allowed
    # list of web crawlers known to the system
    httpd.known_bots = load_known_web_bots(base_dir)
    httpd.unit_test = unit_test
    httpd.allow_local_network_access = allow_local_network_access
    if unit_test:
        # unit tests are run on the local network with LAN addresses
        httpd.allow_local_network_access = True
    httpd.yt_replace_domain = yt_replace_domain
    httpd.twitter_replacement_domain = twitter_replacement_domain
    # newswire storing rss feeds
    httpd.newswire = {}
    # maximum number of posts to appear in the newswire on the right column
    httpd.max_newswire_posts = max_newswire_posts
    # whether to require that all incoming posts have valid jsonld signatures
    httpd.verify_all_signatures = verify_all_signatures
    # This counter is used to update the list of blocked domains in memory.
    # It helps to avoid touching the disk and so improves flooding resistance
    httpd.blocklistUpdateCtr = 0
    httpd.blocklistUpdateInterval = 100
    httpd.domainBlocklist = get_domain_blocklist(base_dir)
    httpd.manual_follower_approval = manual_follower_approval
    if domain.endswith('.onion'):
        onion_domain = domain
    elif domain.endswith('.i2p'):
        i2p_domain = domain
    httpd.onion_domain = onion_domain
    httpd.i2p_domain = i2p_domain
    httpd.media_instance = media_instance
    httpd.blogs_instance = blogs_instance