Implement frecency
parent ee13dac738
commit 9f10d645fc
@@ -137,7 +137,9 @@
|<<completion.timestamp_format,completion.timestamp_format>>|Format of timestamps (e.g. for the history completion).
|<<completion.use_best_match,completion.use_best_match>>|Execute the best-matching command on a partial match.
|<<completion.web_history.exclude,completion.web_history.exclude>>|A list of patterns which should not be shown in the history.
|<<completion.web_history.frecency_bonus,completion.web_history.frecency_bonus>>|How many seconds to award as a bonus per visit for frecency.
|<<completion.web_history.max_items,completion.web_history.max_items>>|Number of URLs to show in the web history.
|<<completion.web_history.sort_criterion,completion.web_history.sort_criterion>>|How to sort URLs in the web history.
|<<confirm_quit,confirm_quit>>|Require a confirmation before quitting the application.
|<<content.autoplay,content.autoplay>>|Automatically start playing `<video>` elements.
|<<content.blocking.adblock.lists,content.blocking.adblock.lists>>|List of URLs to ABP-style adblocking rulesets.

@@ -1919,6 +1921,15 @@ Type: <<types,List of UrlPattern>>
Default: empty

[[completion.web_history.frecency_bonus]]
=== completion.web_history.frecency_bonus
How many seconds to award as a bonus per visit for frecency.

Frecency is calculated as `last_visit_timestamp + (visits - 1) * frecency_bonus`, where `last_visit_timestamp` is measured in seconds since the Epoch. This value defines how much the frecency for a URL will increase with each visit.

Type: <<types,Int>>

Default: +pass:[43200]+
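
A worked example of the formula above (an editor's sketch, not part of this commit; the timestamp and visit count are made up, only the formula and the 43200-second default come from the documentation):

    # frecency = last_visit_timestamp + (visits - 1) * frecency_bonus
    def frecency(last_visit_timestamp: int, visits: int, frecency_bonus: int = 43200) -> int:
        return last_visit_timestamp + (visits - 1) * frecency_bonus

    # A URL last visited at Unix time 1_600_000_000 with 5 recorded visits ranks as if it
    # had been visited 4 * 43200 s = 2 days later: 1_600_000_000 + 172_800 = 1_600_172_800.
    print(frecency(1_600_000_000, 5))  # 1600172800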

[[completion.web_history.max_items]]
=== completion.web_history.max_items
Number of URLs to show in the web history.

@@ -1928,6 +1939,20 @@ Type: <<types,Int>>
Default: +pass:[-1]+

[[completion.web_history.sort_criterion]]
=== completion.web_history.sort_criterion
How to sort URLs in the web history.

Type: <<types,String>>

Valid values:

* +recency+: Entries are sorted from most recently visited to least recently visited.
* +frequency+: Entries are sorted from most visited to least visited.
* +frecency+: Entries are sorted using a combination of recency and frequency.

Default: +pass:[recency]+

[[confirm_quit]]
=== confirm_quit
Require a confirmation before quitting the application.

@@ -83,6 +83,7 @@ class CompletionMetaInfo(sql.SqlTable):
KEYS = {
'excluded_patterns': '',
'force_rebuild': False,
'frecency_bonus': -1,
}
def __init__(self, database: sql.Database,

@@ -133,14 +134,22 @@ class CompletionHistory(sql.SqlTable):
"""History which only has the newest entry for each URL."""
def __init__(self, database: sql.Database,
parent: Optional[QObject] = None) -> None:
super().__init__(database, "CompletionHistory", ['url', 'title', 'last_atime'],
def __init__(self, database: sql.Database, parent: Optional[QObject] = None,
force_creation: bool = False) -> None:
super().__init__(database, "CompletionHistory", ['url', 'title', 'last_atime',
'visits', 'frecency'],
constraints={'url': 'PRIMARY KEY',
'title': 'NOT NULL',
'last_atime': 'NOT NULL'},
parent=parent)
self.create_index('CompletionHistoryAtimeIndex', 'last_atime')
'last_atime': 'NOT NULL',
'visits': 'NOT NULL',
'frecency': 'NOT NULL'},
parent=parent, force_creation=force_creation)
self.create_index('CompletionHistoryAtimeIndex', 'last_atime',
force=force_creation)
self.create_index('CompletionHistoryVisitsIndex', 'visits',
force=force_creation)
self.create_index('CompletionHistoryFrecencyIndex', 'frecency',
force=force_creation)
class WebHistory(sql.SqlTable):

@@ -170,7 +179,6 @@ class WebHistory(sql.SqlTable):
# Store the last saved url to avoid duplicate immediate saves.
self._last_url = None
self.completion = CompletionHistory(database, parent=self)
self.metainfo = CompletionMetaInfo(database, parent=self)
try:

@@ -202,10 +210,22 @@ class WebHistory(sql.SqlTable):
self.metainfo['excluded_patterns'] = patterns
rebuild_completion = True
if rebuild_completion and self:
# If no history exists, we don't need to spawn a dialog for
# cleaning it up.
self._rebuild_completion()
frecency_bonus = config.val.completion.web_history.frecency_bonus
if self.metainfo['frecency_bonus'] != frecency_bonus:
rebuild_completion = True
if rebuild_completion:
self.database.query("DROP TABLE IF EXISTS CompletionHistory").run()
self.completion = CompletionHistory(database, parent=self,
force_creation=True)
if self:
# If no history exists, we don't need to spawn a dialog for
# cleaning it up.
self._rebuild_completion()
self.metainfo['frecency_bonus'] = frecency_bonus
self.metainfo['force_rebuild'] = False
else:
self.completion = CompletionHistory(database, parent=self)
self.create_index('HistoryIndex', 'url')
self.create_index('HistoryAtimeIndex', 'atime')

@@ -282,29 +302,32 @@ class WebHistory(sql.SqlTable):
data: Mapping[str, MutableSequence[str]] = {
'url': [],
'title': [],
'last_atime': []
'last_atime': [],
'visits': [],
'frecency': [],
}
self._progress.start(
"<b>Rebuilding completion...</b><br>"
"This is a one-time operation and happens because the database version "
"or <i>completion.web_history.exclude</i> was changed."
"This is a one-time operation and happens because the database version, "
"<i>completion.web_history.exclude</i>, or "
"<i>completion.web_history.frecency_bonus</i> were changed."
)
# Delete old entries
self.completion.delete_all()
QApplication.processEvents()
# Select the latest entry for each url
q = self.database.query('SELECT url, title, max(atime) AS atime FROM History '
'WHERE NOT redirect '
'GROUP BY url ORDER BY atime asc')
q = self.database.query('''
SELECT url, title, max(atime) AS atime, count(*) AS visits
FROM History
WHERE NOT redirect
GROUP BY url ORDER BY atime asc
''')
result = q.run()
QApplication.processEvents()
entries = list(result)
self._progress.set_maximum(len(entries))
frecency_bonus = config.val.completion.web_history.frecency_bonus
for entry in entries:
self._progress.tick()

@@ -314,6 +337,8 @@ class WebHistory(sql.SqlTable):
data['url'].append(self._format_completion_url(url))
data['title'].append(entry.title)
data['last_atime'].append(entry.atime)
data['visits'].append(entry.visits)
data['frecency'].append((entry.visits - 1) * frecency_bonus + entry.atime)
self._progress.set_maximum(0)

@@ -325,7 +350,6 @@ class WebHistory(sql.SqlTable):
QApplication.processEvents()
self._progress.finish()
self.metainfo['force_rebuild'] = False
def get_recent(self):
"""Get the most recent history entries."""

@@ -421,11 +445,24 @@ class WebHistory(sql.SqlTable):
if redirect or self._is_excluded_from_completion(url):
return
self.completion.insert({
'url': self._format_completion_url(url),
f_url = self._format_completion_url(url)
result = self.completion.insert({
'url': f_url,
'title': title,
'last_atime': atime
}, replace=True)
'last_atime': atime,
'visits': 1,
'frecency': atime,
}, ignore=True)
if not result.rows_affected():
frecency_bonus = config.val.completion.web_history.frecency_bonus
update = {
'visits': 'visits + 1',
'frecency': f'{atime} + visits * {frecency_bonus}',
'last_atime': atime
}
self.completion.update(update, {'url': f_url})
def _format_url(self, url):
return url.toString(QUrl.UrlFormattingOption.RemovePassword | QUrl.ComponentFormattingOption.FullyEncoded)
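
The add_url hunk above relies on the new `ignore=True` insert plus `rows_affected()` to tell a first visit apart from a revisit, and then bumps `visits` and `frecency` with raw SQL expressions. A minimal standalone sketch of that pattern using Python's sqlite3 (the column layout mirrors CompletionHistory; the helper name, URL and timestamps are illustrative, not qutebrowser API):

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("CREATE TABLE CompletionHistory ("
                "url TEXT PRIMARY KEY, title TEXT NOT NULL, last_atime INTEGER NOT NULL, "
                "visits INTEGER NOT NULL, frecency INTEGER NOT NULL)")

    def add_visit(url, title, atime, bonus=43200):
        # First visit: the INSERT succeeds and seeds visits=1, frecency=atime.
        cur = con.execute(
            "INSERT OR IGNORE INTO CompletionHistory "
            "(url, title, last_atime, visits, frecency) VALUES (?, ?, ?, 1, ?)",
            (url, title, atime, atime))
        if cur.rowcount == 0:
            # Revisit: the primary-key conflict was ignored, so update the row in place.
            # `visits` on the right-hand side is the value before this update.
            con.execute(
                "UPDATE CompletionHistory "
                "SET visits = visits + 1, frecency = ? + visits * ?, last_atime = ? "
                "WHERE url = ?", (atime, bonus, atime, url))

    add_visit("https://example.org", "Example", 1000)
    add_visit("https://example.org", "Example", 2000)
    print(con.execute("SELECT visits, frecency FROM CompletionHistory").fetchone())
    # (2, 45200), i.e. last_atime 2000 + (2 - 1) * 43200, matching the documented formula.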

@@ -10,7 +10,7 @@ from qutebrowser.qt.sql import QSqlQueryModel
from qutebrowser.qt.widgets import QWidget
from qutebrowser.misc import sql
from qutebrowser.utils import debug, message, log
from qutebrowser.utils import debug, message, log, utils
from qutebrowser.config import config
from qutebrowser.completion.models import util, BaseCategory

@@ -33,27 +33,6 @@ class HistoryCategory(QSqlQueryModel, BaseCategory):
self.delete_func = delete_func
self._empty_prefix: Optional[str] = None
def _atime_expr(self):
"""If max_items is set, return an expression to limit the query."""
max_items = config.val.completion.web_history.max_items
# HistoryCategory should not be added to the completion in that case.
assert max_items != 0
if max_items < 0:
return ''
min_atime = self._database.query(' '.join([
'SELECT min(last_atime) FROM',
'(SELECT last_atime FROM CompletionHistory',
'ORDER BY last_atime DESC LIMIT :limit)',
])).run(limit=max_items).value()
if not min_atime:
# if there are no history items, min_atime may be '' (issue #2849)
return ''
return "AND last_atime >= {}".format(min_atime)
def set_pattern(self, pattern):
"""Set the pattern used to filter results.

@@ -74,38 +53,54 @@ class HistoryCategory(QSqlQueryModel, BaseCategory):
pattern = pattern.replace('_', '\\_')
words = ['%{}%'.format(w) for w in pattern.split(' ')]
# build a where clause to match all of the words in any order
# given the search term "a b", the WHERE clause would be:
# (url LIKE '%a%' OR title LIKE '%a%') AND
# (url LIKE '%b%' OR title LIKE '%b%')
where_clause = ' AND '.join(
"(url LIKE :{val} escape '\\' OR title LIKE :{val} escape '\\')"
.format(val=i) for i in range(len(words)))
# replace ' in timestamp-format to avoid breaking the query
timestamp_format = config.val.completion.timestamp_format or ''
timefmt = ("strftime('{}', last_atime, 'unixepoch', 'localtime')"
.format(timestamp_format.replace("'", "`")))
try:
if (not self._query or
len(words) != len(self._query.bound_values())):
if (not self._query or len(words) != len(self._query.bound_values())):
# if the number of words changed, we need to generate a new
# query otherwise, we can reuse the prepared query for
# performance
self._query = self._database.query(' '.join([
"SELECT url, title, {}".format(timefmt),
"FROM CompletionHistory",
# query, otherwise we can reuse the prepared query for performance
max_items = config.val.completion.web_history.max_items
# HistoryCategory should not be added to the completion in that case.
assert max_items != 0
sort_criterion = config.val.completion.web_history.sort_criterion
if sort_criterion == 'recency':
sort_column = 'last_atime DESC'
elif sort_criterion == 'frequency':
sort_column = 'visits DESC, last_atime DESC'
elif sort_criterion == 'frecency':
sort_column = 'frecency DESC'
else:
raise utils.Unreachable(sort_criterion)
# build a where clause to match all of the words in any order
# given the search term "a b", the WHERE clause would be:
# (url LIKE '%a%' OR title LIKE '%a%') AND
# (url LIKE '%b%' OR title LIKE '%b%')
where_clause = ' AND '.join(
f"(url LIKE :{i} escape '\\' OR title LIKE :{i} escape '\\')"
for i in range(len(words))
)
# replace ' in timestamp-format to avoid breaking the query
timestamp_format = (
(config.val.completion.timestamp_format or '').replace("'", "`")
)
timefmt = (f"strftime('{timestamp_format}', last_atime, 'unixepoch', "
"'localtime')")
self._query = self._database.query(
f"SELECT url, title, {timefmt} "
"FROM CompletionHistory "
# FIXME: does this comment belong here?
# the incoming pattern will have literal % and _ escaped we
# need to tell SQL to treat '\' as an escape character
'WHERE ({})'.format(where_clause),
self._atime_expr(),
"ORDER BY last_atime DESC",
]), forward_only=False)
f"WHERE ({where_clause}) "
f"ORDER BY {sort_column} "
f"LIMIT {max_items}",
forward_only=False
)
with debug.log_time('sql', 'Running completion query'):
self._query.run(**{
str(i): w for i, w in enumerate(words)})
self._query.run(**{str(i): w for i, w in enumerate(words)})
except sql.KnownError as e:
# Sometimes, the query we built up was invalid, for example,
# due to a large amount of words.
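
For concreteness, an editor's sketch of the SQL the rewritten set_pattern() above assembles for a two-word pattern (simplified: the strftime() timestamp column is left out, and the helper below mirrors the diff rather than any public API):

    sort_columns = {
        'recency': 'last_atime DESC',
        'frequency': 'visits DESC, last_atime DESC',
        'frecency': 'frecency DESC',
    }

    def build_query(word_count, sort_criterion, max_items):
        where_clause = ' AND '.join(
            f"(url LIKE :{i} escape '\\' OR title LIKE :{i} escape '\\')"
            for i in range(word_count))
        return (f"SELECT url, title FROM CompletionHistory "
                f"WHERE ({where_clause}) "
                f"ORDER BY {sort_columns[sort_criterion]} "
                f"LIMIT {max_items}")

    print(build_query(2, 'frecency', -1))
    # Prints (wrapped here for readability):
    #   SELECT url, title FROM CompletionHistory
    #   WHERE ((url LIKE :0 escape '\' OR title LIKE :0 escape '\')
    #     AND (url LIKE :1 escape '\' OR title LIKE :1 escape '\'))
    #   ORDER BY frecency DESC LIMIT -1
    # :0 and :1 are later bound to '%word%' values; LIMIT -1 means "no limit" in SQLite.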

@@ -1354,6 +1354,30 @@ completion.cmd_history_max_items:
0: no history / -1: unlimited
completion.web_history.sort_criterion:
default: recency
type:
name: String
valid_values:
- recency: Entries are sorted from most recently visited to least
recently visited.
- frequency: Entries are sorted from most visited to least visited.
- frecency: Entries are sorted using a combination of recency and
frequency.
desc: How to sort URLs in the web history.
completion.web_history.frecency_bonus:
default: 43200
type:
name: Int
minval: 0
desc: >-
How many seconds to award as a bonus per visit for frecency.
Frecency is calculated as `last_visit_timestamp + (visits - 1)
* frecency_bonus`, where `last_visit_timestamp` is measured in seconds
since the Epoch. This value defines how much the frecency for a URL will
increase with each visit.
completion.height:
type:
name: PercOrInt

@@ -181,7 +181,7 @@ class Database:
"""A wrapper over a QSqlDatabase connection."""
_USER_VERSION = UserVersion(0, 4) # The current / newest user version
_USER_VERSION = UserVersion(1, 0) # The current / newest user version
def __init__(self, path: str) -> None:
if QSqlDatabase.database(path).isValid():

@@ -429,7 +429,8 @@ class SqlTable(QObject):
def __init__(self, database: Database, name: str, fields: list[str],
constraints: Optional[dict[str, str]] = None,
parent: Optional[QObject] = None) -> None:
parent: Optional[QObject] = None,
force_creation: bool = False) -> None:
"""Wrapper over a table in the SQL database.
Args:

@@ -437,11 +438,12 @@ class SqlTable(QObject):
name: Name of the table.
fields: A list of field names.
constraints: A dict mapping field names to constraint strings.
force_creation: Force the creation of the table.
"""
super().__init__(parent)
self._name = name
self.database = database
self._create_table(fields, constraints)
self._create_table(fields, constraints, force=force_creation)
def _create_table(self, fields: list[str], constraints: Optional[dict[str, str]],
*, force: bool = False) -> None:

@@ -461,14 +463,15 @@ class SqlTable(QObject):
)
q.run()
def create_index(self, name: str, field: str) -> None:
def create_index(self, name: str, field: str, force: bool = False) -> None:
"""Create an index over this table if the database is uninitialized.
Args:
name: Name of the index, should be unique.
field: Name of the field to index.
force: If true, create the index even if the database is uninitialized.
"""
if not self.database.user_version_changed():
if not self.database.user_version_changed() and not force:
return
q = self.database.query(

@@ -517,37 +520,67 @@ class SqlTable(QObject):
raise KeyError(f'No row with {field} = {value!r}')
self.changed.emit()
def _insert_query(self, values: Mapping[str, Any], replace: bool) -> Query:
def _insert_query(self, values: Mapping[str, Any], replace: bool,
ignore: bool) -> Query:
if replace and ignore:
raise ValueError('replace and ignore cannot be True at the same time')
params = ', '.join(f':{key}' for key in values)
columns = ', '.join(values)
verb = "REPLACE" if replace else "INSERT"
verb = 'INSERT'
if replace:
verb += ' OR REPLACE'
elif ignore:
verb += ' OR IGNORE'
return self.database.query(
f"{verb} INTO {self._name} ({columns}) values({params})"
)
def insert(self, values: Mapping[str, Any], replace: bool = False) -> None:
def insert(self, values: Mapping[str, Any], replace: bool = False,
ignore: bool = False) -> Query:
"""Append a row to the table.
Args:
values: A dict with a value to insert for each field name.
replace: If set, replace existing values.
ignore: If set, ignore constraint errors.
"""
q = self._insert_query(values, replace)
q.run(**values)
q = self._insert_query(values, replace, ignore)
result = q.run(**values)
self.changed.emit()
return result
def insert_batch(self, values: Mapping[str, MutableSequence[Any]],
replace: bool = False) -> None:
replace: bool = False, ignore: bool = False) -> None:
"""Performantly append multiple rows to the table.
Args:
values: A dict with a list of values to insert for each field name.
replace: If true, overwrite rows with a primary key match.
ignore: If true, ignore constraint errors.
"""
q = self._insert_query(values, replace)
q = self._insert_query(values, replace, ignore)
q.run_batch(values)
self.changed.emit()
def update(self, update: Mapping[str, Any], where: Mapping[str, Any]) -> Query:
"""Execute update rows statement.
Args:
update: column:value dict with new values to set
where: column:value dict for filtering
"""
u = ', '.join(f'{k} = {v}' for k, v in update.items())
s = f'UPDATE {self._name} SET {u}'
if where:
w = ' AND '.join(f'{k} = :{k}' for k in where.keys())
s += f' WHERE {w}'
result = self.database.query(s).run(**where)
self.changed.emit()
return result
def delete_all(self) -> None:
"""Remove all rows from the table."""
self.database.query(f"DELETE FROM {self._name}").run()
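
One detail worth noting about the new update() method above: the SET values are interpolated into the statement verbatim, while only the WHERE values are bound as parameters, which is what lets history.py pass SQL expressions such as 'visits + 1'. A small self-contained sketch of the statement it builds (the table name and numbers are illustrative):

    update = {'visits': 'visits + 1',
              'frecency': '1700000000 + visits * 43200',
              'last_atime': 1700000000}
    where = {'url': 'https://example.org'}

    u = ', '.join(f'{k} = {v}' for k, v in update.items())
    w = ' AND '.join(f'{k} = :{k}' for k in where)
    print(f'UPDATE CompletionHistory SET {u} WHERE {w}')
    # UPDATE CompletionHistory SET visits = visits + 1,
    #   frecency = 1700000000 + visits * 43200, last_atime = 1700000000 WHERE url = :url

Because the SET side is spliced in unquoted, it should only ever receive trusted, code-controlled values, as it does in history.py and the tests below.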

@@ -121,7 +121,7 @@ class TestDelete:
completion_diff = completion_before.difference(
set(web_history.completion))
assert completion_diff == {(raw, '', 0)}
assert completion_diff == {(raw, '', 0, 1, 0)}
class TestAdd:

@@ -152,7 +152,7 @@ class TestAdd:
if completion_url is None:
assert not len(web_history.completion)
else:
expected = [(completion_url, title, atime)]
expected = [(completion_url, title, atime, 1, atime)]
assert list(web_history.completion) == expected
def test_no_sql_web_history(self, web_history, monkeypatch):

@@ -171,7 +171,7 @@ class TestAdd:
@pytest.mark.parametrize('completion', [True, False])
def test_error(self, monkeypatch, web_history, message_mock, caplog,
known_error, completion):
def raise_error(url, replace=False):
def raise_error(url, replace=False, ignore=False):
if known_error:
raise sql.KnownError("Error message")
raise sql.BugError("Error message")

@@ -255,6 +255,42 @@ class TestAdd:
web_history.add_from_tab(QUrl(url), QUrl(url), 'title')
assert list(web_history) == hist
@pytest.mark.parametrize(
'urls, expected', [
(['http://a', 'http://b', 'http://a'],
{'http://a': 2, 'http://b': 1}),
(['http://a/a a', 'http://b', 'http://a/a%20a'],
{'http://a/a a': 2, 'http://b': 1}),
]
)
def test_visits(self, web_history, urls, expected):
for url in urls:
web_history.add_url(QUrl(url))
completion = {i.url: i.visits for i in web_history.completion}
assert completion == expected
@pytest.mark.parametrize(
'urls, expected', [
([('http://a', 1)], [('http://a', lambda b: 1)]),
([('http://a', 1), ('http://a', 3), ('http://a', 8)],
[('http://a', lambda b: 2 * b + 8)]),
([('http://a', 1), ('http://b/b b', 2), ('http://a', 3),
('http://b/b%20b', 3), ('http://b/b b', 4), ('http://b/b b', 9)],
[('http://a', lambda b: b + 3),
('http://b/b b', lambda b: 3 * b + 9)]),
]
)
@pytest.mark.parametrize(
'frecency_bonus', [0, 1, 2, 100]
)
def test_frecency(self, config_stub, web_history, urls, expected, frecency_bonus):
config_stub.val.completion.web_history.frecency_bonus = frecency_bonus
calculated_expected = {k: v(frecency_bonus) for k, v in expected}
for url, atime in urls:
web_history.add_url(QUrl(url), atime=atime)
completion = {i.url: i.frecency for i in web_history.completion}
assert completion == calculated_expected
class TestHistoryInterface:

@@ -361,15 +397,15 @@ class TestRebuild:
web_history.completion.delete('url', 'example.com/2')
hist2 = history.WebHistory(database, progress=stubs.FakeHistoryProgress())
assert list(hist2.completion) == [('example.com/1', '', 1)]
assert list(hist2.completion) == [('example.com/1', '', 1, 1, 1)]
monkeypatch.setattr(web_history.database, 'user_version_changed', lambda: True)
hist3 = history.WebHistory(web_history.database,
progress=stubs.FakeHistoryProgress())
assert list(hist3.completion) == [
('example.com/1', '', 1),
('example.com/2', '', 2),
('example.com/1', '', 1, 1, 1),
('example.com/2', '', 2, 1, 2),
]
assert not hist3.metainfo['force_rebuild']

@@ -382,14 +418,14 @@ class TestRebuild:
hist2 = history.WebHistory(web_history.database,
progress=stubs.FakeHistoryProgress())
assert list(hist2.completion) == [('example.com/1', '', 1)]
assert list(hist2.completion) == [('example.com/1', '', 1, 1, 1)]
hist2.metainfo['force_rebuild'] = True
hist3 = history.WebHistory(web_history.database,
progress=stubs.FakeHistoryProgress())
assert list(hist3.completion) == [
('example.com/1', '', 1),
('example.com/2', '', 2),
('example.com/1', '', 1, 1, 1),
('example.com/2', '', 2, 1, 2),
]
assert not hist3.metainfo['force_rebuild']

@@ -407,7 +443,7 @@ class TestRebuild:
hist2 = history.WebHistory(web_history.database,
progress=stubs.FakeHistoryProgress())
assert list(hist2.completion) == [('http://example.com', '', 1)]
assert list(hist2.completion) == [('http://example.com', '', 1, 1, 1)]
def test_pattern_change_rebuild(self, config_stub, web_history, stubs):
"""Ensure that completion is rebuilt when exclude patterns change."""

@@ -421,7 +457,7 @@ class TestRebuild:
hist2 = history.WebHistory(web_history.database,
progress=stubs.FakeHistoryProgress())
assert list(hist2.completion) == [
('http://example.com', '', 1),
('http://example.com', '', 1, 1, 1),
]
config_stub.val.completion.web_history.exclude = []

@@ -429,8 +465,8 @@ class TestRebuild:
hist3 = history.WebHistory(web_history.database,
progress=stubs.FakeHistoryProgress())
assert list(hist3.completion) == [
('http://example.com', '', 1),
('http://example.org', '', 2)
('http://example.com', '', 1, 1, 1),
('http://example.org', '', 2, 1, 2),
]
def test_progress(self, monkeypatch, web_history, config_stub, stubs):

@@ -463,7 +499,7 @@ class TestRebuild:
hist2 = history.WebHistory(web_history.database,
progress=stubs.FakeHistoryProgress())
assert list(hist2.completion) == [('example.com/1', '', 1)]
assert list(hist2.completion) == [('example.com/1', '', 1, 1, 1)]
class TestCompletionMetaInfo:

@@ -21,7 +21,8 @@ def hist(data_tmpdir, config_stub):
db = sql.Database(str(data_tmpdir / 'test_histcategory.db'))
config_stub.val.completion.timestamp_format = '%Y-%m-%d'
config_stub.val.completion.web_history.max_items = -1
yield sql.SqlTable(db, 'CompletionHistory', ['url', 'title', 'last_atime'])
yield sql.SqlTable(db, 'CompletionHistory',
['url', 'title', 'last_atime', 'visits', 'frecency'])
db.close() # pytest could re-use the filename

@@ -183,8 +184,9 @@ def test_set_pattern_hypothesis(hist, pat, caplog):
]),
(1, [], []), # issue 2849 (crash with empty history)
])
def test_sorting(max_items, before, after, model_validator, hist, config_stub):
def test_sorting_recency(max_items, before, after, model_validator, hist, config_stub):
"""Validate the filtering and sorting results of set_pattern."""
config_stub.val.completion.web_history.sort_criterion = 'recency'
config_stub.val.completion.web_history.max_items = max_items
for url, title, atime in before:
timestamp = datetime.datetime.strptime(atime, '%Y-%m-%d').timestamp()

@@ -195,6 +197,83 @@ def test_sorting(max_items, before, after, model_validator, hist, config_stub):
model_validator.validate(after)
@pytest.mark.parametrize('max_items, before, after', [
(-1, [
('a', 'a', '2017-04-16', 1),
('b', 'b', '2017-06-16', 3),
('c', 'c', '2017-05-16', 2),
], [
('b', 'b', '2017-06-16'),
('c', 'c', '2017-05-16'),
('a', 'a', '2017-04-16'),
]),
(-1, [
('a', 'a', '2017-04-16', 1),
('b', 'b', '2017-06-16', 1),
('c', 'c', '2017-05-16', 1),
], [
('b', 'b', '2017-06-16'),
('c', 'c', '2017-05-16'),
('a', 'a', '2017-04-16'),
]),
(2, [
('a', 'a', '2017-04-16', 1),
('b', 'b', '2017-06-16', 3),
('c', 'c', '2017-05-16', 2),
], [
('b', 'b', '2017-06-16'),
('c', 'c', '2017-05-16'),
]),
])
def test_sorting_frequency(max_items, before, after, model_validator, hist,
config_stub):
"""Validate sorting results of set_pattern for frequency."""
config_stub.val.completion.web_history.sort_criterion = 'frequency'
config_stub.val.completion.web_history.max_items = max_items
for url, title, atime, visits in before:
timestamp = datetime.datetime.strptime(atime, '%Y-%m-%d').timestamp()
hist.insert({'url': url, 'title': title, 'last_atime': timestamp,
'visits': visits})
cat = histcategory.HistoryCategory(database=hist.database)
model_validator.set_model(cat)
cat.set_pattern('')
model_validator.validate(after)
@pytest.mark.parametrize('max_items, before, after', [
(-1, [
('a', 'a', '2017-04-16', 1),
('b', 'b', '2017-06-16', 3),
('c', 'c', '2017-05-16', 2),
], [
('b', 'b', '2017-06-16'),
('c', 'c', '2017-05-16'),
('a', 'a', '2017-04-16'),
]),
(2, [
('a', 'a', '2017-04-16', 1),
('b', 'b', '2017-06-16', 3),
('c', 'c', '2017-05-16', 2),
], [
('b', 'b', '2017-06-16'),
('c', 'c', '2017-05-16'),
]),
])
def test_sorting_frecency(max_items, before, after, model_validator, hist,
config_stub):
"""Validate sorting results of set_pattern for frecency."""
config_stub.val.completion.web_history.sort_criterion = 'frecency'
config_stub.val.completion.web_history.max_items = max_items
for url, title, atime, frecency in before:
timestamp = datetime.datetime.strptime(atime, '%Y-%m-%d').timestamp()
hist.insert({'url': url, 'title': title, 'last_atime': timestamp,
'frecency': frecency})
cat = histcategory.HistoryCategory(database=hist.database)
model_validator.set_model(cat)
cat.set_pattern('')
model_validator.validate(after)
def test_remove_rows(hist, model_validator):
hist.insert({'url': 'foo', 'title': 'Foo', 'last_atime': 0})
hist.insert({'url': 'bar', 'title': 'Bar', 'last_atime': 0})

@@ -1306,7 +1306,9 @@ def test_url_completion_benchmark(benchmark, info,
entries = {
'last_atime': list(r),
'url': ['http://example.com/{}'.format(i) for i in r],
'title': ['title{}'.format(i) for i in r]
'title': ['title{}'.format(i) for i in r],
'visits': list(r),
'frecency': list(r),
}
web_history.completion.insert_batch(entries)

@@ -247,6 +247,24 @@ def test_iter(database):
('thirteen', 13, True)]
def test_update(database, qtbot):
table = database.table('Foo', ['a', 'b', 'c'])
table.insert({'a': 10, 'b': 10, 'c': 10})
table.insert({'a': 20, 'b': 20, 'c': 20})
with qtbot.wait_signal(table.changed):
table.update({'a': 11, 'b': 12, 'c': 13}, {'a': 10, 'b': 10})
with qtbot.wait_signal(table.changed):
table.update({'a': 21}, {'c': 20})
assert list(table) == [(11, 12, 13), (21, 20, 20)]
with qtbot.wait_signal(table.changed):
table.update({'a': 'a * a', 'b': 'b + 3'}, {'a': 11, 'b': 12})
assert list(table) == [(121, 15, 13), (21, 20, 20)]
@pytest.mark.parametrize('rows, sort_by, sort_order, limit, result', [
([{"a": 2, "b": 5}, {"a": 1, "b": 6}, {"a": 3, "b": 4}], 'a', 'asc', 5,
[(1, 6), (2, 5), (3, 4)]),