        self._cache[key] = (revid, data)


# Used to store locks that prevent multiple threads from building a
# revision graph for the same branch at the same time, because that can
# cause severe performance issues that are so bad that the system seems
# to hang.
revision_graph_locks = {}
revision_graph_check_lock = threading.Lock()
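# revision_graph_locks maps a branch's cache key to the Lock that serializes
# whole-history computation for that branch; revision_graph_check_lock only
# guards the creation of new entries in that dict.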


class History(object):
    """Decorate a branch to provide information for rendering.

        def update_missed_caches():
            for cache in missed_caches:
                cache.set(cache_key, self.last_revid, self._rev_info)
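        # update_missed_caches() is called once the revision data is known, so
        # that every cache that returned a miss gets populated for next time.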

        # Theoretically, it's possible for two threads to race in creating
        # the Lock() object for their branch, so we put a lock around
        # creating the per-branch Lock().
        revision_graph_check_lock.acquire()
        try:
            if cache_key not in revision_graph_locks:
                revision_graph_locks[cache_key] = threading.Lock()
        finally:
            revision_graph_check_lock.release()
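
        # Only one thread per branch builds or loads the revision graph at a
        # time; other requests for the same branch block here rather than
        # repeating the expensive work.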
        revision_graph_locks[cache_key].acquire()
        try:
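            # Try each cache in turn.  On a hit, reuse the data and backfill
            # the caches that missed; if every cache misses, recompute the
            # whole history from the branch and fill them all.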
            for cache in caches:
                data = cache.get(cache_key, self.last_revid)
                if data is not None:
                    self._rev_info = data
                    update_missed_caches()
                    break
                else:
                    missed_caches.append(cache)
            else:
                whole_history_data = compute_whole_history_data(self._branch)
                self._rev_info, self._rev_indices = whole_history_data
                update_missed_caches()
        finally:
            revision_graph_locks[cache_key].release()

        if self._rev_indices is not None:
            self._revno_revid = {}