|
1 |
| -import logging |
2 |
| -import argparse |
3 |
| -from typing import List, Pattern |
4 |
| -from pprint import pprint |
5 |
| -from collections import defaultdict |
6 |
| -from datetime import timedelta |
7 |
| - |
8 |
| -from aw_transform import heartbeat_reduce |
9 |
| -from aw_transform.flood import flood |
10 |
| -from aw_transform.simplify import simplify_string |
11 |
| - |
12 |
| -from aw_client import ActivityWatchClient |
13 |
| - |
14 |
| -from aw_research.redact import redact_words |
15 |
| -from aw_research.algorithmia import run_sentiment, run_LDA |
16 |
| -from aw_research.merge import merge_close_and_similar |
17 |
| -from aw_research.classify import _main as _main_classify |
18 |
| -from aw_research.classify import _build_argparse as _build_argparse_classify |
19 |
| - |
20 |
# Module-wide logging: INFO level so the _main_* progress messages are visible.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
22 |
| - |
23 |
| - |
24 |
def assert_no_overlap(events):
    """Verify that no two consecutive events overlap in time.

    Events are sorted by timestamp; for each adjacent pair, the gap between
    the end of the earlier event and the start of the later one must be
    non-negative. Every overlapping pair is logged before failing, to aid
    debugging.

    Raises:
        AssertionError: if any pair of events overlaps.
    """
    overlap = False
    events = sorted(events, key=lambda e: e.timestamp)
    for e1, e2 in zip(events[:-1], events[1:]):
        e1_end = e1.timestamp + e1.duration
        gap = e2.timestamp - e1_end
        if gap < timedelta(0):
            logging.getLogger(__name__).warning("Events overlapped: {}".format(gap))
            overlap = True
    # Explicit raise instead of a bare `assert` so the check still fires
    # when Python runs with optimizations (-O strips assert statements).
    if overlap:
        raise AssertionError("events overlapped")
34 |
| - |
35 |
| - |
36 |
def _get_window_events(n=1000):
    """Fetch up to `n` events from a non-testing window bucket.

    Args:
        n: maximum number of events to retrieve.

    Returns:
        A list of events, or an empty list if no suitable bucket exists.
    """
    client = ActivityWatchClient("aw-analyser", testing=True)
    buckets = client.get_buckets()

    bucket_id = None
    for _bid in buckets.keys():
        if "window" in _bid and "testing" not in _bid:
            # NOTE: no break — if several buckets match, the last one wins.
            bucket_id = _bid

    if bucket_id:
        return client.get_events(bucket_id, limit=n)
    else:
        # Use the module logger (not print) for consistency with the rest
        # of this file's diagnostics.
        logger.warning("Did not find bucket")
        return []
50 |
| - |
51 |
| - |
52 |
def _main_redact(pattern: str, ignore_case: bool):
    """Retrieve window events and redact those whose data matches `pattern`.

    The redaction is applied in-memory only; nothing is written back.
    """
    logger.info("Retrieving events...")
    window_events = _get_window_events()

    logger.info("Redacting using regular expression: " + pattern)
    redacted = redact_words(window_events, pattern, ignore_case=ignore_case)

    print("NOTE: Redactions are not persisted to server")
60 |
| - |
61 |
| - |
62 |
def _main_analyse():
    """Run LDA topic modelling and sentiment analysis on window titles."""
    logger.info("Retrieving events...")
    events = _get_window_events()

    logger.info("Running analysis...")
    unique_titles = list({e.data["title"] for e in events})

    # Topic modelling over the distinct titles.
    pprint(run_LDA(unique_titles).result)

    # Sentiment per title; only show non-neutral results.
    sentiment = run_sentiment(unique_titles)
    pprint([r for r in sentiment.result if r["sentiment"] != 0])

    # Sentiment over all titles joined into a single document.
    sentiment = run_sentiment(" ".join(unique_titles))
    pprint([r for r in sentiment.result if r["sentiment"] != 0])
76 |
| - |
77 |
| - |
78 |
def _main_merge():
    """Merge close-in-time and similar window events, then report stats."""
    logger.info("Retrieving events...")
    events = simplify_string(_get_window_events(n=1000))

    merged_events = merge_close_and_similar(events)
    print("{} events became {} after merging of similar ones".format(len(events), len(merged_events)))

    # Debugging
    assert_no_overlap(events)
    assert_no_overlap(merged_events)
    print_most_common_titles(events)
    print_most_common_titles(merged_events)
91 |
| - |
92 |
| - |
93 |
def _main_heartbeat_reduce():
    """Merge window events via heartbeat reduction (10s pulsetime)."""
    logger.info("Retrieving events...")
    events = simplify_string(_get_window_events())

    logger.info("Beating hearts together...")
    merged_events = heartbeat_reduce(events, pulsetime=10)

    # Debugging
    assert_no_overlap(events)
    assert_no_overlap(merged_events)
    print_most_common_titles(events)
    print_most_common_titles(merged_events)
106 |
| - |
107 |
| - |
108 |
def _main_flood():
    """Fill small gaps between window events using the flood transform."""
    logger.info("Retrieving events...")
    events = simplify_string(_get_window_events())

    logger.info("Flooding...")
    merged_events = flood(events)

    # Debugging
    assert_no_overlap(events)
    assert_no_overlap(merged_events)
    print_most_common_titles(events)
    print_most_common_titles(merged_events)
121 |
| - |
122 |
| - |
123 |
def print_most_common_titles(events):
    """Print the total duration and the 15 titles with most accumulated time."""
    time_per_title = defaultdict(lambda: timedelta(0))
    for event in events:
        time_per_title[event.data["title"]] += event.duration

    print("-" * 30)

    total = sum((event.duration for event in events), timedelta(0))
    print("Total duration: {}".format(total))

    # Sort (duration, title) pairs descending so the longest titles come first.
    ranked = sorted(((d, t) for t, d in time_per_title.items()), reverse=True)
    for duration, title in ranked[:15]:
        print("{:15s} - {}".format(str(duration), title))

    print("-" * 30)
| 1 | +import aw_research.main |
139 | 2 |
|
140 | 3 |
|
if __name__ == "__main__":
    # Command-line entry point: one subcommand per analysis mode.
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="cmd")
    redact = subparsers.add_parser('redact')
    # Raw string: in a plain literal, "\b" is a backspace control character,
    # which would corrupt the help text instead of showing the regex
    # word-boundary escape the example intends.
    redact.add_argument('pattern', help=r"Regular expression to match events with, a good example that matches on 3 words: \b(sensitive|secret|)\b")
    redact.add_argument('--ignore-case', action='store_true', help="Ignore case sensitivity (the pattern and all strings are lowercased before matching)")
    subparsers.add_parser('analyse')
    subparsers.add_parser('merge')
    subparsers.add_parser('flood')
    subparsers.add_parser('heartbeat')
    classify = subparsers.add_parser('classify')
    _build_argparse_classify(classify)

    args = parser.parse_args()

    # Dispatch to the matching _main_* handler; fall back to usage text.
    if args.cmd == "redact":
        _main_redact(args.pattern, args.ignore_case)
    elif args.cmd == "analyse":
        _main_analyse()
    elif args.cmd == "merge":
        _main_merge()
    elif args.cmd == "flood":
        _main_flood()
    elif args.cmd == "heartbeat":
        _main_heartbeat_reduce()
    elif args.cmd == "classify":
        _main_classify(args)
    else:
        parser.print_usage()
| 5 | + aw_research.main.main() |
0 commit comments