+/**
+ * Shrink the image cache towards @desired_size bytes.
+ *
+ * First drops every inline ("cid:") image unconditionally, then walks the
+ * remaining entries from most- to least-recently used, keeping as many of
+ * the newest images as fit within @desired_size and unreferencing the rest.
+ *
+ * @param desired_size  Target upper bound (in bytes) for the summed pixbuf
+ *                      sizes of the images kept in the cache.
+ * @return Number of images removed from the cache.
+ */
+gint container_linux::clear_images(gsize desired_size)
+{
+	gsize size = 0;
+	gint num = 0;
+
+	lock_images_cache();
+
+	/* First, remove all local images - the ones with "cid:" URL. */
+	for (auto i = m_images.begin(); i != m_images.end(); ) {
+		if (!strncmp(i->first.c_str(), "cid:", 4)) {
+			g_object_unref(i->second.first);
+			i = m_images.erase(i);
+			num++;
+		} else {
+			++i;
+		}
+	}
+
+	/* Second, build an LRU list, ordered oldest-first by access time.
+	 * This must be a multiset, not a set: the comparator only looks at
+	 * the timestamp, so with a plain set two entries with identical
+	 * timevals would compare equivalent and one of them would be
+	 * silently dropped from the list - never counted towards the cache
+	 * size and never eligible for pruning. */
+	auto lru_comp_func = [](const lru_entry& l1, const lru_entry& l2) {
+		return timercmp(&l1.second, &l2.second, <);
+	};
+	std::multiset<lru_entry, decltype(lru_comp_func)> lru(lru_comp_func);
+
+	for (auto i = m_images.begin(); i != m_images.end(); ++i) {
+		lru.insert(std::make_pair(i->first, i->second.second));
+	}
+
+	/*
+	for (auto l = lru.begin(); l != lru.end(); l++) {
+		debug_print("lru dump: %d %d %s\n", l->second.tv_sec, l->second.tv_usec, l->first.c_str());
+	}
+	*/
+
+	/* Last, walk the LRU list newest-first and remove the entries that
+	 * push the accumulated size over the desired limit. */
+	for (auto l = lru.rbegin(); l != lru.rend(); ++l) {
+		gsize cursize;
+
+		auto i = m_images.find(l->first);
+
+		if(i == m_images.end()) {
+			g_warning("failed to find '%s' in m_images", l->first.c_str());
+			continue;
+		}
+
+		if(i->second.first == NULL) {
+			/* This should mean that the download is still in progress */
+			debug_print("warning - trying to prune a null pixbuf for %s\n", i->first.c_str());
+			continue;
+		}
+
+		cursize = gdk_pixbuf_get_byte_length(i->second.first);
+		/*
+		debug_print("clear_images: desired_size %d - size %d - cursize %d - %d %d %s\n",
+				desired_size, size, cursize, l->second.tv_sec, l->second.tv_usec, l->first.c_str());
+		*/
+		if (size + cursize > desired_size) {
+			debug_print("pruning %s from image cache\n", i->first.c_str());
+			g_object_unref(i->second.first);
+			m_images.erase(i);
+			num++;
+		} else {
+			size += cursize;
+		}
+	}
+
+	unlock_images_cache();
+
+	return num;
+}
+
+/*
+const char* container_linux::get_default_font_name() const