"""Persistence layer for tagged selections of mailing-list threads.

Two JSON files are maintained in lockstep:

* ``sel``      -- the flat selection index: ``{tag: {"desc": ..., "lists": [...]}}``
* ``sel_dump`` -- a denormalized dump with full (possibly nested) thread data.

All read-modify-write cycles are serialized through the module lock ``LL``.
"""

import glob
import json
import logging
import os
from threading import Lock

import config
from selection import strutil

# Absolute paths of the two selection files, resolved once from config.
sel = os.path.join(config.selection['path'], config.selection['sel'])
sel_dump = os.path.join(config.selection['path'], config.selection['sel_dump'])

# Guards every read-modify-write cycle on sel / sel_dump.
LL = Lock()


# TAGS

def update(tag, newtag, newdesc):
    """Rename ``tag`` to ``newtag`` and set its description to ``newdesc``.

    Applies the change to both the selection file and the dump.
    Returns ``False`` when ``tag`` does not exist, ``True`` otherwise.
    """
    with LL:
        d = load_selection()
        if tag not in d:
            return False
        if newtag != tag:
            d[newtag] = d.pop(tag)
        # BUGFIX: the description is now updated even when the tag is also
        # renamed; previously a rename silently discarded newdesc.
        d[newtag]['desc'] = newdesc
        write_selection(d)

        sd = load_selection_dump()
        if tag not in sd:
            logging.warning("possible inconsistency between sel and sel_dump...")
        else:
            if newtag != tag:
                sd[newtag] = sd.pop(tag)
            sd[newtag]['desc'] = newdesc
            write_selection_dump(sd)
        return True


def delete(tag):
    """Remove ``tag`` from both files.

    Returns ``False`` when ``tag`` does not exist, ``True`` otherwise.
    """
    with LL:
        d = load_selection()
        if tag not in d:
            return False
        del d[tag]
        write_selection(d)

        sd = load_selection_dump()
        if tag not in sd:
            logging.warning("possible inconsistency between sel and sel_dump...")
        else:
            del sd[tag]
            write_selection_dump(sd)
        return True


def new(tag, desc):
    """Create a new empty ``tag`` with description ``desc`` in both files.

    Returns ``False`` when the tag already exists, ``True`` otherwise.
    """
    with LL:
        d = load_selection()
        if tag in d:
            return False
        d[tag] = {"desc": desc, "lists": []}
        write_selection(d)

        sd = load_selection_dump()
        sd[tag] = {"desc": desc, "lists": []}
        write_selection_dump(sd)
        return True


# URL

def delete_url(tag, url):
    """Remove the message identified by ``url`` from ``tag`` in both files.

    The selection file holds a flat list, so a direct removal suffices; the
    dump holds nested threads and is pruned via :func:`recursive_delete`.
    Returns ``False`` when ``tag`` does not exist, ``True`` otherwise.
    """
    with LL:
        d = load_selection()
        if tag not in d:
            return False
        for m in d[tag]['lists']:
            if m['url'] == url:
                d[tag]['lists'].remove(m)
                break  # removal + immediate break: safe despite iteration
        write_selection(d)

        sd = load_selection_dump()
        if tag not in sd:
            logging.warning("possible inconsistency between sel and sel_dump...")
        else:
            # Iterate over a snapshot: recursive_delete may remove entries
            # from sd[tag]['lists'] while we walk it.
            for m in list(sd[tag]['lists']):
                if recursive_delete(m, sd[tag]['lists'], url):
                    break
            write_selection_dump(sd)
        return True


def load_selection():
    """Load and return the selection index as a dict."""
    with open(sel, encoding='utf-8') as f:
        return json.load(f)


def load_selection_dump():
    """Load and return the selection dump as a dict."""
    with open(sel_dump, encoding='utf-8') as f:
        return json.load(f)


def write_selection(d):
    """Overwrite the selection file with ``d`` (pretty-printed JSON)."""
    with open(sel, 'w+', encoding='utf-8') as f:
        json.dump(d, f, indent=4)


def write_selection_dump(d):
    """Overwrite the selection dump with ``d`` (pretty-printed JSON)."""
    with open(sel_dump, 'w+', encoding='utf-8') as f:
        json.dump(d, f, indent=4)


def lists():
    """Return the names of all archived mailing lists."""
    return os.listdir(config.archives)


def tags():
    """Return all tags as ``[{'tag': ..., 'desc': ...}, ...]``."""
    d = load_selection()
    # Local no longer shadows the function name (was: tags = []).
    return [{'tag': k, 'desc': v['desc']} for k, v in d.items()]


def tags_list():
    """Return just the tag names."""
    return list(load_selection())


def tags_w_lists():
    """Return every tag with its description and flattened message info.

    Shape: ``[{'tag': ..., 'desc': ..., 'lists': [...]}, ...]``.
    """
    d = load_selection_dump()
    result = []
    for k, v in d.items():
        entry = {'tag': k, 'desc': v['desc']}
        msgs = []
        for m in v['lists']:
            # NOTE(review): recursive_info is not defined in this module and
            # is not an attribute access on the visible imports — presumably
            # provided elsewhere; confirm. The 'keep_hierachy' spelling is
            # part of its external signature and must not be "fixed" here.
            msgs += recursive_info(m, keep_hierachy=True)
        entry['lists'] = msgs
        result.append(entry)
    return result


def recursive_find(msg, li, url):
    """Search ``msg`` and its follow-ups for ``url``.

    On a direct hit, tags the message with its list name and returns it.
    On a hit inside a follow-up, returns the *parent* thread (deliberate:
    callers want the whole thread). Returns ``None`` when not found.
    """
    if msg['url'] == url:
        msg['list'] = li  # <-- tagging
        return msg
    for m in msg.get('follow-up', []):
        if recursive_find(m, li, url) is not None:
            return msg  # <-- parent thread
    return None


def recursive_delete(msg, parent_list, url):
    """Delete the message with ``url`` from a nested thread.

    ``parent_list`` is the list that directly contains ``msg``.
    Returns ``True`` when something was deleted anywhere in the subtree.
    """
    if msg['url'] == url:
        parent_list.remove(msg)
        return True
    deleted = False
    # Iterate over a snapshot: the recursive call removes matches from
    # msg['follow-up'] itself, which previously mutated the list mid-loop
    # and could skip siblings.
    for m in list(msg.get('follow-up', [])):
        deleted = recursive_delete(m, msg['follow-up'], url) or deleted
    return deleted


def find(li, url):
    """Locate the thread containing ``url`` in the archive of list ``li``.

    Scans every ``*.json`` file of the list's archive directory.
    Returns the thread dict, or ``None`` when the list directory is invalid
    or the url is not found.
    """
    d = os.path.join(config.archives, li)
    if not os.path.isdir(d):
        logging.warning("Invalid archive path: " + d)
        print("Invalid archive path: " + d)
        return None
    for path in glob.glob(os.path.join(d, "*.json")):
        with open(path, encoding='utf-8') as fp:
            dj = json.load(fp)
        for msg in dj['threads']:
            # Result bound to its own name; the original reused the file
            # variable 'f' here, shadowing it mid-loop.
            found = recursive_find(msg, li, url)
            if found is not None:
                return found
    return None


def recursive_urls(msg):
    """Return the urls of ``msg`` and all of its (nested) follow-ups."""
    r = [msg['url']]
    for m in msg.get('follow-up', []):
        r += recursive_urls(m)
    return r
#