
Commit ce96fe2

Merge pull request CloudBotIRC#191 from linuxdaemon/gonzobot+pager
Add a util module for paginating lists
2 parents 3bbfacc + ab83b77 commit ce96fe2


5 files changed: +225 -197 lines changed


cloudbot/util/pager.py

Lines changed: 76 additions & 0 deletions
@@ -0,0 +1,76 @@
from threading import RLock

from cloudbot.util.sequence import chunk_iter


class Pager:
    """Multiline pager

    Takes a string with newlines and paginates it into chunks of a certain size
    """

    @classmethod
    def from_multiline_string(cls, s):
        return cls(s.splitlines())

    def __init__(self, lines, chunk_size=2):
        # This lock should always be acquired when accessing data from this object
        # Added here due to extensive use of threads throughout plugins
        self.lock = RLock()
        self.chunk_size = chunk_size
        self.chunks = tuple(chunk_iter(lines, self.chunk_size))
        self.current_pos = 0

    def format_chunk(self, chunk, pagenum):
        chunk = list(chunk)
        if len(self.chunks) > 1:
            chunk[-1] += " (page {}/{})".format(pagenum + 1, len(self.chunks))

        return chunk

    def next(self):
        with self.lock:
            if self.current_pos >= len(self.chunks):
                return None

            chunk = self[self.current_pos]
            self.current_pos += 1

            return chunk

    def get(self, index):
        """Get a specific page"""
        return self[index]

    def __getitem__(self, item):
        """Get a specific page"""
        with self.lock:
            chunk = self.chunks[item]
            return self.format_chunk(chunk, item)

    def __len__(self):
        with self.lock:
            return len(self.chunks)


def paginated_list(data, delim=" \u2022 ", suffix='...', max_len=256, page_size=2):
    lines = [""]
    for item in data:
        if len(item) > max_len:
            # The length of a single item is longer than our max line length, split it
            lines.append(item[:max_len])
            lines.append(item[max_len:])
        elif len(lines[-1]) + len(item) > max_len:
            lines.append(item)
        else:
            if lines[-1]:
                lines[-1] += delim

            lines[-1] += item

    formatted_lines = []
    while lines:
        line = lines.pop(0)
        formatted_lines.append("{}{}".format(line, suffix if lines else ""))

    return Pager(formatted_lines, chunk_size=page_size)
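
For orientation, a brief usage sketch of the new module, assuming it is importable as cloudbot.util.pager; the quote strings below are invented sample data, not part of the commit:

# Illustration only, not part of the commit.
from cloudbot.util.pager import paginated_list

quotes = [
    "<alice> the first grabbed quote",
    "<bob> another grabbed quote",
    "<carol> yet another grabbed quote",
]
pager = paginated_list(quotes, max_len=48, page_size=2)

page = pager.next()       # a list of up to page_size formatted lines
while page is not None:
    for line in page:     # with several pages, the last line gains " (page x/y)"
        print(line)
    page = pager.next()   # returns None once every page has been served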

cloudbot/util/sequence.py

Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
"""
Sequence utilities - Various util functions for working with lists, sets, tuples, etc.
"""


def chunk_iter(data, chunk_size):
    """
    Splits a sequence into chunks
    :param data: The sequence to split
    :param chunk_size: The maximum size of each chunk
    :return: An iterable of all the chunks of the sequence
    """
    for i in range(0, len(data), chunk_size):
        yield data[i:i + chunk_size]
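
As a quick illustration of what chunk_iter yields (assuming the function above is in scope; the sample data is made up):

list(chunk_iter([1, 2, 3, 4, 5], 2))   # -> [[1, 2], [3, 4], [5]]
list(chunk_iter("abcdef", 4))          # -> ['abcd', 'ef']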

plugins/grab.py

Lines changed: 27 additions & 44 deletions
@@ -8,9 +8,9 @@
 
 from cloudbot import hook
 from cloudbot.util import database
+from cloudbot.util.pager import paginated_list
 
-search_pages = defaultdict(list)
-search_page_indexes = {}
+search_pages = defaultdict(dict)
 
 table = Table(
     'grab',
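
The reworked module-level state keys pagers by connection name and then by channel, so searches on different networks no longer share page state. A minimal sketch of the lookup pattern, with made-up connection and channel names and a plain string standing in for a Pager:

# Stand-in sketch, not part of the commit.
from collections import defaultdict

search_pages = defaultdict(dict)

conn_name, chan = "examplenet", "#example"         # hypothetical names
search_pages[conn_name][chan] = "a Pager object"   # grabsearch stores the pager here
pages = search_pages[conn_name].get(chan)          # moregrab looks it up; None if no search has run yet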
@@ -43,42 +43,28 @@ def load_cache(db):
         grab_cache.setdefault(chan, {}).setdefault(name, []).append(quote)
 
 
-def two_lines(bigstring, chan):
-    """Receives a string with new lines. Groups the string into a list of strings with up to 3 new lines per string element. Returns first string element then stores the remaining list in search_pages."""
-    global search_pages
-    temp = bigstring.split('\n')
-    for i in range(0, len(temp), 2):
-        search_pages[chan].append('\n'.join(temp[i:i+2]))
-    search_page_indexes[chan] = 0
-    return search_pages[chan][0]
-
-
-def smart_truncate(content, length=355, suffix='...\n'):
-    if len(content) <= length:
-        return content
-    else:
-        return content[:length].rsplit(' \u2022 ', 1)[0]+ suffix + content[:length].rsplit(' \u2022 ', 1)[1] + smart_truncate(content[length:])
-
-
 @hook.command("moregrab", autohelp=False)
-def moregrab(text, chan):
+def moregrab(text, chan, conn):
     """If a grab search has lots of results, the results are paginated. If the most recent search is paginated, the pages are stored for retrieval. If no argument is given the next page will be returned, else a page number can be specified."""
-    if not search_pages[chan]:
-        return "There are grabsearch pages to show."
+    pages = search_pages[conn.name].get(chan)
+    if not pages:
+        return "There are no grabsearch pages to show."
+
     if text:
-        index = ""
         try:
             index = int(text)
         except ValueError:
             return "Please specify an integer value."
-        if abs(int(index)) > len(search_pages[chan]) or index == 0:
-            return "please specify a valid page number between 1 and {}.".format(len(search_pages[chan]))
+
+        page = pages[index - 1]
+        if page is None:
+            return "Please specify a valid page number between 1 and {}.".format(len(pages))
         else:
-            return "{}(page {}/{})".format(search_pages[chan][index-1], index, len(search_pages[chan]))
+            return page
     else:
-        search_page_indexes[chan] += 1
-        if search_page_indexes[chan] < len(search_pages[chan]):
-            return "{}(page {}/{})".format(search_pages[chan][search_page_indexes[chan]], search_page_indexes[chan] + 1, len(search_pages[chan]))
+        page = pages.next()
+        if page is not None:
+            return page
         else:
             return "All pages have been shown you can specify a page number or do a new search."
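
For reference, a hedged sketch of how the two moregrab branches drive a Pager; the data is made up, and the numbered lookup is wrapped in try/except here because Pager.__getitem__ raises IndexError for an out-of-range page number rather than returning None:

# Illustration only, not part of the commit.
from cloudbot.util.pager import paginated_list

pages = paginated_list(["<alice> one", "<bob> two", "<carol> three"], max_len=16)

# Numbered lookup, as in the `if text:` branch.
try:
    page = pages[2 - 1]      # page number 2
except IndexError:
    page = None

# Sequential paging, as in the `else:` branch.
next_page = pages.next()     # None once every page has been shown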

@@ -160,7 +146,7 @@ def lastgrab(text, chan, message):
         return "<{}> has never been grabbed.".format(text)
     if lgrab:
         quote = lgrab
-        message(format_grab(text, quote),chan)
+        message(format_grab(text, quote), chan)
 
 
 @hook.command("grabrandom", "grabr", autohelp=False)
@@ -190,12 +176,9 @@ def grabrandom(text, chan, message):
 
 
 @hook.command("grabsearch", "grabs", autohelp=False)
-def grabsearch(text, chan):
+def grabsearch(text, chan, conn):
     """.grabsearch <text> matches "text" against nicks or grab strings in the database"""
-    out = ""
     result = []
-    search_pages[chan] = []
-    search_page_indexes[chan] = 0
     try:
         quotes = grab_cache[chan][text.lower()]
         for grab in quotes:
@@ -208,17 +191,17 @@ def grabsearch(text, chan):
             if text.lower() in grab.lower():
                 result.append((name, grab))
     if result:
-        for grab in result:
-            name = grab[0]
+        grabs = []
+        for name, quote in result:
             if text.lower() == name:
                 name = text
-            quote = grab[1]
-            out += "{} {} ".format(format_grab(name, quote), u'\u2022')
-        out = smart_truncate(out)
-        out = out[:-2]
-        out = two_lines(out, chan)
-        if len(search_pages[chan]) > 1:
-            return "{}(page {}/{}) .moregrab".format(out, search_page_indexes[chan] + 1, len(search_pages[chan]))
-        return out
+            grabs.append(format_grab(name, quote))
+        pager = paginated_list(grabs)
+        search_pages[conn.name][chan] = pager
+        page = pager.next()
+        if len(page) > 1:
+            page[-1] += " .moregrab"
+
+        return page
     else:
         return "I couldn't find any matches for {}.".format(text)
