1from datetime import datetime
2
3from loguru import logger
4from sqlalchemy import Column, DateTime, Integer, String, Unicode
5
6from flexget import db_schema, plugin
7from flexget.db_schema import Session
8from flexget.entry import Entry
9from flexget.event import event
10from flexget.utils import qualities
11from flexget.utils.database import quality_property
12from flexget.utils.tools import group_entries, parse_timedelta
13
# Plugin-scoped logger: loguru's bind() stamps every record with this plugin's name.
logger = logger.bind(name='timeframe')

# NOTE(review): the schema is versioned under the name 'upgrade' while the plugin,
# logger and table are all named 'timeframe' -- confirm this matches the intended
# schema/migration registry before changing it.
Base = db_schema.versioned_base('upgrade', 0)

# Maps the config's action names to the Entry methods that implement them;
# 'do_nothing' is intentionally absent and handled by mapping to None at use sites.
entry_actions = {'accept': Entry.accept, 'reject': Entry.reject}
19
20
class EntryTimeFrame(Base):
    """DB row tracking the best release seen for one identifier while waiting.

    One row per grouped identifier; `status` moves from 'waiting' to 'accepted'
    once a release is finally taken (see FilterTimeFrame.on_task_learn).
    """

    __tablename__ = 'timeframe'

    id = Column(Unicode, primary_key=True, index=True)
    status = Column(Unicode)  # 'waiting' or 'accepted'
    title = Column(Unicode)
    _quality = Column('quality', String)
    quality = quality_property('_quality')
    # BUGFIX: pass the callable, not datetime.now() -- calling it here evaluates
    # once at import time, freezing that moment as the default for every row.
    first_seen = Column(DateTime, default=datetime.now)
    proper_count = Column(Integer, default=0)

    def __str__(self):
        # BUGFIX: was self.added, which is not a column on this model and raised
        # AttributeError; first_seen is the timestamp actually stored.
        return '<Timeframe(id=%s,added=%s,quality=%s)>' % (
            self.id,
            self.first_seen,
            self.quality,
        )
34
35
class FilterTimeFrame:
    """Hold back accepted entries until a target quality appears or a timeout passes.

    Accepted entries are grouped by an identifier (the media_id field by default).
    For each group the best candidate seen so far is tracked in the DB; until the
    configured target quality is reached, or the `wait` interval since the first
    sighting expires, the group's entries get the `on_waiting` action (reject by
    default) and the best candidate is kept in the backlog so it is not lost
    between task runs.
    """

    schema = {
        'type': 'object',
        'properties': {
            'identified_by': {'type': 'string', 'default': 'auto'},
            'target': {'type': 'string', 'format': 'quality_requirements'},
            'wait': {'type': 'string', 'format': 'interval'},
            'on_waiting': {
                'type': 'string',
                'enum': ['accept', 'reject', 'do_nothing'],
                'default': 'reject',
            },
            'on_reached': {
                'type': 'string',
                'enum': ['accept', 'reject', 'do_nothing'],
                'default': 'do_nothing',
            },
        },
        'required': ['target', 'wait'],
        'additionalProperties': False,
    }

    @staticmethod
    def _identifier_template(config):
        """Return the grouping template; 'auto' groups on the entry's media_id."""
        return '{{ media_id }}' if config['identified_by'] == 'auto' else config['identified_by']

    @staticmethod
    def _prefetch_timeframes(session, identifiers):
        """Load existing EntryTimeFrame rows for *identifiers* in one query, keyed by id."""
        rows = session.query(EntryTimeFrame).filter(EntryTimeFrame.id.in_(identifiers)).all()
        return {row.id: row for row in rows}

    # Run last so we work on only accepted entries
    @plugin.priority(plugin.PRIORITY_LAST)
    def on_task_filter(self, task, config):
        """Apply the waiting/reached actions to this run's accepted entries."""
        if not config:
            return

        grouped_entries = group_entries(task.accepted, self._identifier_template(config))
        if not grouped_entries:
            return

        # 'do_nothing' maps to None so the call sites below can just test truthiness.
        action_on_waiting = (
            entry_actions[config['on_waiting']] if config['on_waiting'] != 'do_nothing' else None
        )
        action_on_reached = (
            entry_actions[config['on_reached']] if config['on_reached'] != 'do_nothing' else None
        )

        # Loop-invariant: the target depends only on config, so parse it once
        # here instead of once per identifier inside the loop.
        target_requirement = qualities.Requirements(config['target'])
        target_quality = qualities.Quality(config['target'])

        with Session() as session:
            # Prefetch all tracked state for this run's identifiers.
            existing_ids = self._prefetch_timeframes(session, grouped_entries.keys())

            for identifier, entries in grouped_entries.items():
                if not entries:
                    continue

                id_timeframe = existing_ids.get(identifier)
                if not id_timeframe:
                    # First sighting of this identifier: start the wait clock.
                    id_timeframe = EntryTimeFrame()
                    id_timeframe.id = identifier
                    id_timeframe.status = 'waiting'
                    id_timeframe.first_seen = datetime.now()
                    session.add(id_timeframe)

                if id_timeframe.status == 'accepted':
                    logger.debug(
                        'Previously accepted {} with {} skipping', identifier, id_timeframe.title
                    )
                    continue

                # Sort entities in order of quality and best proper
                entries.sort(key=lambda e: (e['quality'], e.get('proper_count', 0)), reverse=True)
                best_entry = entries[0]

                logger.debug(
                    'Current best for identifier {} is {}', identifier, best_entry['title']
                )

                # Remember the best candidate seen so far for this identifier.
                id_timeframe.title = best_entry['title']
                id_timeframe.quality = best_entry['quality']
                id_timeframe.proper_count = best_entry.get('proper_count', 0)

                # Check we hit target or better
                if (
                    target_requirement.allows(best_entry['quality'])
                    or best_entry['quality'] >= target_quality
                ):
                    logger.debug(
                        'timeframe reach target quality {} or higher for {}',
                        target_quality,
                        identifier,
                    )
                    if action_on_reached:
                        action_on_reached(best_entry, 'timeframe reached target quality or higher')
                    continue

                # Check if passed wait time
                expires = id_timeframe.first_seen + parse_timedelta(config['wait'])
                if expires <= datetime.now():
                    logger.debug(
                        'timeframe expired, releasing quality restriction for {}', identifier
                    )
                    if action_on_reached:
                        action_on_reached(best_entry, 'timeframe wait expired')
                    continue

                # Still waiting: apply the waiting action to every entry in the group.
                if action_on_waiting:
                    for entry in entries:
                        action_on_waiting(entry, 'timeframe waiting')

                # Express the remaining wait as whole hours and minutes for the log.
                diff = expires - datetime.now()
                hours, remainder = divmod(diff.seconds, 3600)
                hours += diff.days * 24
                minutes, _ = divmod(remainder, 60)

                logger.info(
                    '`{}`: timeframe waiting for {:02d}h:{:02d}min. Currently best is `{}`.',
                    identifier,
                    hours,
                    minutes,
                    best_entry['title'],
                )

                # add best entry to backlog (backlog is able to handle duplicate adds)
                plugin.get('backlog', self).add_backlog(task, best_entry, session=session)

    def on_task_learn(self, task, config):
        """Record the finally-taken release and stop waiting for its identifier."""
        if not config:
            return

        grouped_entries = group_entries(task.accepted, self._identifier_template(config))
        if not grouped_entries:
            return

        with Session() as session:
            # Prefetch tracked state; identifiers never seen by on_task_filter
            # are intentionally skipped below.
            existing_ids = self._prefetch_timeframes(session, grouped_entries.keys())

            for identifier, entries in grouped_entries.items():
                if not entries:
                    continue

                id_timeframe = existing_ids.get(identifier)
                if not id_timeframe:
                    continue

                # Sort entities in order of quality
                entries.sort(key=lambda e: e['quality'], reverse=True)

                # First entry will be the best quality
                best_entry = entries[0]

                # Persist what was taken and mark the identifier as done.
                id_timeframe.quality = best_entry['quality']
                id_timeframe.title = best_entry['title']
                id_timeframe.proper_count = best_entry.get('proper_count', 0)
                id_timeframe.status = 'accepted'
204
205
@event('plugin.register')
def register_plugin():
    """Register the timeframe filter with FlexGet's v2 plugin API."""
    plugin.register(FilterTimeFrame, 'timeframe', api_ver=2)
209