diff --git a/help/changelog.html b/help/changelog.html
index 0c6e3cad1..a5c7b26f3 100755
--- a/help/changelog.html
+++ b/help/changelog.html
@@ -8,7 +8,24 @@
changelog
- version 138
+ version 140
+
+ - if a repository or subscription popup message has nothing to report, it will dismiss itself
+ - fixed handling of text popup display when the object passed was not text
+ - delete orphans is now cancellable
+ - vacuum, delete orphans, and upload pending popup messages will dismiss themselves an hour after they are done
+ - tightened the subscription final state popup message to just a title and a button
+ - removed much of the very expensive autocomplete tag cache maintenance code, which seems not to be worth the effort
+ - culled the autocomplete tag cache in prep for new maintenance cycle
+ - fixed a resize timing bug that was causing large images to scale in an ugly way when the media viewer was launched in a borderless state
+ - 'open selection in new page' will no longer default focus on the sort dropdown; it'll go to the media panel (this was causing scrolling confusion)
+ - fixed a non-updating display bug when resizing frames/dialogs with auto-resizing listctrls on linux
+ - cleaned up a wall-of-text error when closing the client immediately after deleting a tag service
+ - filled a gap in static text image object cleanup
+ - cleaned up some thumbnail waterfall/fade code
+ - filled several gaps in thumbnail object cleanup
+
+ version 139
- cleaned up all the old popup message code, and fully integrated the new
- moved repo sync to the new popup messaging system
diff --git a/include/ClientConstants.py b/include/ClientConstants.py
index 23bf62082..5139a31c1 100755
--- a/include/ClientConstants.py
+++ b/include/ClientConstants.py
@@ -837,7 +837,7 @@ def ShowTextClient( text ):
job_key = HC.JobKey()
- job_key.SetVariable( 'popup_message_text_1', text )
+ job_key.SetVariable( 'popup_message_text_1', HC.u( text ) )
HC.pubsub.pub( 'message', job_key )
@@ -1340,7 +1340,7 @@ def AddData( self, key, data ):
if key not in self._keys_to_data:
- while self._total_estimated_memory_footprint > HC.options[ self._cache_size_key ] or ( random.randint( 0, 2 ) == 0 and len( self._keys_to_data ) > 0 ):
+ while self._total_estimated_memory_footprint > HC.options[ self._cache_size_key ]:
( deletee_key, last_access_time ) = self._keys_fifo.pop( 0 )
diff --git a/include/ClientController.py b/include/ClientController.py
index 50f01a78a..373ffa1f7 100755
--- a/include/ClientController.py
+++ b/include/ClientController.py
@@ -335,7 +335,11 @@ def MaintainDB( self ):
services = self.GetManager( 'services' ).GetServices()
- for service in services: self.Read( 'service_info', service.GetServiceKey() )
+ for service in services:
+
+ try: self.Read( 'service_info', service.GetServiceKey() )
+ except: pass # sometimes this breaks when a service has just been removed and the client is closing, so ignore the error
+
self._timestamps[ 'last_service_info_cache_fatten' ] = HC.GetNow()
diff --git a/include/ClientDB.py b/include/ClientDB.py
index c0d1b8d01..78d8d5be2 100755
--- a/include/ClientDB.py
+++ b/include/ClientDB.py
@@ -1263,10 +1263,10 @@ def _GetNamespaceIdTagId( self, tag ):
self._c.execute( 'INSERT INTO existing_tags ( namespace_id, tag_id ) VALUES ( ?, ? );', ( namespace_id, tag_id ) )
- tag_service_ids = self._GetServiceIds( ( HC.TAG_REPOSITORY, HC.LOCAL_TAG, HC.COMBINED_TAG ) )
- file_service_ids = self._GetServiceIds( ( HC.FILE_REPOSITORY, HC.LOCAL_FILE, HC.COMBINED_FILE ) )
+ #tag_service_ids = self._GetServiceIds( ( HC.TAG_REPOSITORY, HC.LOCAL_TAG, HC.COMBINED_TAG ) )
+ #file_service_ids = self._GetServiceIds( ( HC.FILE_REPOSITORY, HC.LOCAL_FILE, HC.COMBINED_FILE ) )
- self._c.executemany( 'INSERT OR IGNORE INTO autocomplete_tags_cache ( file_service_id, tag_service_id, namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ?, ?, ? );', [ ( file_service_id, tag_service_id, namespace_id, tag_id, 0, 0 ) for ( tag_service_id, file_service_id ) in itertools.product( tag_service_ids, file_service_ids ) ] )
+ #self._c.executemany( 'INSERT OR IGNORE INTO autocomplete_tags_cache ( file_service_id, tag_service_id, namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ?, ?, ? );', [ ( file_service_id, tag_service_id, namespace_id, tag_id, 0, 0 ) for ( tag_service_id, file_service_id ) in itertools.product( tag_service_ids, file_service_ids ) ] )
return ( namespace_id, tag_id )
@@ -1385,7 +1385,7 @@ def _AddService( self, service_key, service_type, name, info ):
inserts = ( ( file_service_id, service_id, namespace_id, tag_id, 0, 0 ) for ( file_service_id, ( namespace_id, tag_id ) ) in itertools.product( file_service_ids, existing_tag_ids ) )
- self._c.executemany( 'INSERT OR IGNORE INTO autocomplete_tags_cache ( file_service_id, tag_service_id, namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ?, ?, ? );', inserts )
+ #self._c.executemany( 'INSERT OR IGNORE INTO autocomplete_tags_cache ( file_service_id, tag_service_id, namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ?, ?, ? );', inserts )
elif service_type == HC.FILE_REPOSITORY:
@@ -1395,7 +1395,7 @@ def _AddService( self, service_key, service_type, name, info ):
inserts = ( ( service_id, tag_service_id, namespace_id, tag_id, 0, 0 ) for ( tag_service_id, ( namespace_id, tag_id ) ) in itertools.product( tag_service_ids, existing_tag_ids ) )
- self._c.executemany( 'INSERT OR IGNORE INTO autocomplete_tags_cache ( file_service_id, tag_service_id, namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ?, ?, ? );', inserts )
+ #self._c.executemany( 'INSERT OR IGNORE INTO autocomplete_tags_cache ( file_service_id, tag_service_id, namespace_id, tag_id, current_count, pending_count ) VALUES ( ?, ?, ?, ?, ?, ? );', inserts )
@@ -1567,7 +1567,7 @@ def _DeleteOrphans( self ):
prefix = 'database maintenance - delete orphans: '
- job_key = HC.JobKey()
+ job_key = HC.JobKey( cancellable = True )
job_key.SetVariable( 'popup_message_text_1', prefix + 'gathering file information' )
@@ -1595,6 +1595,18 @@ def _DeleteOrphans( self ):
try:
+ if HC.shutdown or job_key.IsCancelled():
+
+ if HC.shutdown: return
+
+ if job_key.IsCancelled():
+
+ job_key.SetVariable( 'popup_message_text_1', prefix + 'cancelled' )
+
+ return
+
+
+
path = CC.GetFilePath( hash )
os.chmod( path, stat.S_IWRITE )
@@ -1629,16 +1641,36 @@ def _DeleteOrphans( self ):
path = CC.GetExpectedThumbnailPath( hash, True )
resized_path = CC.GetExpectedThumbnailPath( hash, False )
- if os.path.exists( path ): os.remove( path )
- if os.path.exists( resized_path ): os.remove( resized_path )
+ try:
+
+ if HC.shutdown or job_key.IsCancelled():
+
+ if HC.shutdown: return
+
+ if job_key.IsCancelled():
+
+ job_key.SetVariable( 'popup_message_text_1', prefix + 'cancelled' )
+
+ return
+
+
+
+ if os.path.exists( path ): os.remove( path )
+ if os.path.exists( resized_path ): os.remove( resized_path )
+
+ except: continue
self._c.execute( 'REPLACE INTO shutdown_timestamps ( shutdown_type, timestamp ) VALUES ( ?, ? );', ( CC.SHUTDOWN_TIMESTAMP_DELETE_ORPHANS, HC.GetNow() ) )
job_key.SetVariable( 'popup_message_text_1', prefix + 'done!' )
+ job_key.Finish()
+
print( HC.ConvertJobKeyToString( job_key ) )
+ wx.CallLater( 1000 * 3600, job_key.Delete )
+
def _DeletePending( self, service_key ):
@@ -3832,6 +3864,10 @@ def _ProcessContentUpdates( self, service_keys_to_content_updates, pub_immediate
if len( ultimate_mappings_ids ) + len( ultimate_deleted_mappings_ids ) + len( ultimate_pending_mappings_ids ) + len( ultimate_pending_rescinded_mappings_ids ) + len( ultimate_petitioned_mappings_ids ) + len( ultimate_petitioned_rescinded_mappings_ids ) > 0:
+ #import cProfile
+
+ #cProfile.runctx( 'self._UpdateMappings( service_id, mappings_ids = ultimate_mappings_ids, deleted_mappings_ids = ultimate_deleted_mappings_ids, pending_mappings_ids = ultimate_pending_mappings_ids, pending_rescinded_mappings_ids = ultimate_pending_rescinded_mappings_ids, petitioned_mappings_ids = ultimate_petitioned_mappings_ids, petitioned_rescinded_mappings_ids = ultimate_petitioned_rescinded_mappings_ids )', globals(), locals())
+
self._UpdateMappings( service_id, mappings_ids = ultimate_mappings_ids, deleted_mappings_ids = ultimate_deleted_mappings_ids, pending_mappings_ids = ultimate_pending_mappings_ids, pending_rescinded_mappings_ids = ultimate_pending_rescinded_mappings_ids, petitioned_mappings_ids = ultimate_petitioned_mappings_ids, petitioned_rescinded_mappings_ids = ultimate_petitioned_rescinded_mappings_ids )
notify_new_pending = True
@@ -4189,36 +4225,36 @@ def ChangeMappingStatus( namespace_id, tag_id, hash_ids, old_status, new_status
UpdateAutocompleteTagCache( tag_service_id, namespace_id, tag_id, pertinent_hash_ids, HC.PENDING, 1 )
- UpdateCombinedMappings( namespace_id, tag_id, pertinent_hash_ids, 'insert', HC.PENDING )
+ UpdateCombinedMappings( namespace_id, tag_id, pertinent_hash_ids, HC.PENDING, 1 )
if old_status == HC.PENDING and new_status != HC.PENDING:
UpdateAutocompleteTagCache( tag_service_id, namespace_id, tag_id, pertinent_hash_ids, HC.PENDING, -1 )
- UpdateCombinedMappings( namespace_id, tag_id, pertinent_hash_ids, 'delete', HC.PENDING )
+ UpdateCombinedMappings( namespace_id, tag_id, pertinent_hash_ids, HC.PENDING, -1 )
if old_status != HC.CURRENT and new_status == HC.CURRENT:
UpdateAutocompleteTagCache( tag_service_id, namespace_id, tag_id, pertinent_hash_ids, HC.CURRENT, 1 )
- UpdateCombinedMappings( namespace_id, tag_id, pertinent_hash_ids, 'insert', HC.CURRENT )
+ UpdateCombinedMappings( namespace_id, tag_id, pertinent_hash_ids, HC.CURRENT, 1 )
if old_status == HC.CURRENT and new_status != HC.CURRENT:
UpdateAutocompleteTagCache( tag_service_id, namespace_id, tag_id, pertinent_hash_ids, HC.CURRENT, -1 )
- UpdateCombinedMappings( namespace_id, tag_id, pertinent_hash_ids, 'delete', HC.CURRENT )
+ UpdateCombinedMappings( namespace_id, tag_id, pertinent_hash_ids, HC.CURRENT, -1 )
return ( num_old_deleted + num_old_made_new, num_old_made_new )
- def UpdateCombinedMappings( namespace_id, tag_id, hash_ids, action, status ):
+ def UpdateCombinedMappings( namespace_id, tag_id, hash_ids, status, direction ):
- if action == 'delete':
+ if direction == -1:
existing_other_service_hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM mappings WHERE service_id IN ' + splayed_other_service_ids + ' AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' AND status = ?;', ( namespace_id, tag_id, status ) ) }
@@ -4226,9 +4262,7 @@ def UpdateCombinedMappings( namespace_id, tag_id, hash_ids, action, status ):
self._c.execute( 'DELETE FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( pertinent_hash_ids ) + ' AND status = ?;', ( self._combined_tag_service_id, namespace_id, tag_id, status ) )
- direction = -1
-
- elif action == 'insert':
+ elif direction == 1:
existing_combined_hash_ids = { hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM mappings WHERE service_id = ? AND namespace_id = ? AND tag_id = ? AND hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' AND status = ?;', ( self._combined_tag_service_id, namespace_id, tag_id, status ) ) }
@@ -4236,10 +4270,8 @@ def UpdateCombinedMappings( namespace_id, tag_id, hash_ids, action, status ):
self._c.executemany( 'INSERT OR IGNORE INTO mappings VALUES ( ?, ?, ?, ?, ? );', [ ( self._combined_tag_service_id, namespace_id, tag_id, hash_id, status ) for hash_id in pertinent_hash_ids ] )
- direction = 1
-
- UpdateAutocompleteTagCache( self._combined_tag_service_id, namespace_id, tag_id, pertinent_hash_ids, status, direction )
+ if len( pertinent_hash_ids ) > 0: UpdateAutocompleteTagCache( self._combined_tag_service_id, namespace_id, tag_id, pertinent_hash_ids, status, direction )
def DeletePending( namespace_id, tag_id, hash_ids ):
@@ -4250,7 +4282,7 @@ def DeletePending( namespace_id, tag_id, hash_ids ):
UpdateAutocompleteTagCache( tag_service_id, namespace_id, tag_id, hash_ids, HC.PENDING, -1 )
- UpdateCombinedMappings( namespace_id, tag_id, hash_ids, 'delete', HC.PENDING )
+ UpdateCombinedMappings( namespace_id, tag_id, hash_ids, HC.PENDING, -1 )
return num_deleted
@@ -4279,13 +4311,13 @@ def InsertMappings( namespace_id, tag_id, hash_ids, status ):
UpdateAutocompleteTagCache( tag_service_id, namespace_id, tag_id, new_hash_ids, HC.CURRENT, 1 )
- UpdateCombinedMappings( namespace_id, tag_id, new_hash_ids, 'insert', HC.CURRENT )
+ UpdateCombinedMappings( namespace_id, tag_id, new_hash_ids, HC.CURRENT, 1 )
elif status == HC.PENDING:
UpdateAutocompleteTagCache( tag_service_id, namespace_id, tag_id, new_hash_ids, HC.PENDING, 1 )
- UpdateCombinedMappings( namespace_id, tag_id, new_hash_ids, 'insert', HC.PENDING )
+ UpdateCombinedMappings( namespace_id, tag_id, new_hash_ids, HC.PENDING, 1 )
return num_rows_added
@@ -4302,14 +4334,16 @@ def InsertPetitions( namespace_id, tag_id, hash_ids, reason_id ):
def UpdateAutocompleteTagCache( tag_service_id, namespace_id, tag_id, hash_ids, status, direction ):
- file_service_info = self._c.execute( 'SELECT service_id, COUNT( * ) FROM files_info WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' GROUP BY service_id;' ).fetchall()
+ #file_service_info = self._c.execute( 'SELECT service_id, COUNT( * ) FROM files_info WHERE hash_id IN ' + HC.SplayListForDB( hash_ids ) + ' GROUP BY service_id;' ).fetchall()
+
+ #file_service_info.append( ( self._combined_file_service_id, len( hash_ids ) ) )
- file_service_info.append( ( self._combined_file_service_id, len( hash_ids ) ) )
+ #if status == HC.CURRENT: critical_phrase = 'current_count = current_count + ?'
+ #elif status == HC.PENDING: critical_phrase = 'pending_count = pending_count + ?'
- if status == HC.CURRENT: critical_phrase = 'current_count = current_count + ?'
- elif status == HC.PENDING: critical_phrase = 'pending_count = pending_count + ?'
+ #self._c.executemany( 'UPDATE autocomplete_tags_cache SET ' + critical_phrase + ' WHERE file_service_id = ? AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, file_service_id, tag_service_id, namespace_id, tag_id ) for ( file_service_id, count ) in file_service_info ] )
- self._c.executemany( 'UPDATE autocomplete_tags_cache SET ' + critical_phrase + ' WHERE file_service_id = ? AND tag_service_id = ? AND namespace_id = ? AND tag_id = ?;', [ ( count * direction, file_service_id, tag_service_id, namespace_id, tag_id ) for ( file_service_id, count ) in file_service_info ] )
+ self._c.execute( 'DELETE FROM autocomplete_tags_cache WHERE namespace_id = ? AND tag_id = ?;', ( namespace_id, tag_id ) )
change_in_num_mappings = 0
@@ -5815,6 +5849,16 @@ def _UpdateDB( self, version ):
+ if version == 139:
+
+ self._combined_tag_service_id = self._GetServiceId( HC.COMBINED_TAG_SERVICE_KEY )
+ self._local_file_service_id = self._GetServiceId( HC.LOCAL_FILE_SERVICE_KEY )
+
+ self._c.execute( 'DELETE FROM autocomplete_tags_cache WHERE tag_service_id != ?;', ( self._combined_tag_service_id, ) )
+ self._c.execute( 'DELETE FROM autocomplete_tags_cache WHERE file_service_id != ?;', ( self._local_file_service_id, ) )
+ self._c.execute( 'DELETE FROM autocomplete_tags_cache WHERE current_count < ?;', ( 5, ) )
+
+
self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
HC.is_db_updated = True
@@ -5849,6 +5893,8 @@ def _Vacuum( self ):
print( HC.ConvertJobKeyToString( job_key ) )
+ wx.CallLater( 1000 * 3600, job_key.Delete )
+
def pub_after_commit( self, topic, *args, **kwargs ): self._pubsubs.append( ( topic, args, kwargs ) )
@@ -7043,7 +7089,8 @@ def DAEMONSynchroniseRepositories():
print( HC.ConvertJobKeyToString( job_key ) )
- job_key.Finish()
+ if total_content_weight_processed > 0: job_key.Finish()
+ else: job_key.Delete()
except Exception as e:
@@ -7356,7 +7403,10 @@ def DAEMONSynchroniseSubscriptions():
print( HC.ConvertJobKeyToString( job_key ) )
- job_key.Finish()
+ job_key.DeleteVariable( 'popup_message_text_1' )
+
+ if len( successful_hashes ) > 0: job_key.Finish()
+ else: job_key.Delete()
last_checked = now
diff --git a/include/ClientGUI.py b/include/ClientGUI.py
index 867efdceb..e515c27d3 100755
--- a/include/ClientGUI.py
+++ b/include/ClientGUI.py
@@ -107,6 +107,7 @@ def __init__( self ):
HC.pubsub.sub( self, 'SetDBLockedStatus', 'db_locked_status' )
HC.pubsub.sub( self, 'SetDownloadsStatus', 'downloads_status' )
HC.pubsub.sub( self, 'SetInboxStatus', 'inbox_status' )
+ HC.pubsub.sub( self, 'SetMediaFocus', 'set_media_focus' )
self._menus = {}
@@ -298,6 +299,8 @@ def _THREADUploadPending( self, service_key ):
job_key.Finish()
+ wx.CallLater( 1000 * 3600, job_key.Delete )
+
HC.pubsub.pub( 'notify_new_pending' )
@@ -1662,7 +1665,7 @@ def _StartURLDownload( self ):
url_string = url
- job_key = HC.JobKey()
+ job_key = HC.JobKey( pausable = True, cancellable = True )
HC.pubsub.pub( 'message', job_key )
@@ -2117,6 +2120,8 @@ def SetDownloadsStatus( self, status ):
+ def SetMediaFocus( self ): self._SetMediaFocus()
+
def SetInboxStatus( self, status ):
if self.IsShown():
diff --git a/include/ClientGUICanvas.py b/include/ClientGUICanvas.py
index 5e084aa90..5dcffa253 100755
--- a/include/ClientGUICanvas.py
+++ b/include/ClientGUICanvas.py
@@ -4070,6 +4070,8 @@ def _Draw( self ):
wx_bitmap = wx.BitmapFromImage( image )
+ wx.CallAfter( image.Destroy )
+
else: wx_bitmap = hydrus_bitmap.GetWxBitmap()
dc.DrawBitmap( wx_bitmap, 0, 0 )
diff --git a/include/ClientGUICommon.py b/include/ClientGUICommon.py
index cff499798..813d242ad 100755
--- a/include/ClientGUICommon.py
+++ b/include/ClientGUICommon.py
@@ -1365,7 +1365,7 @@ def _InitialiseSizeAndPosition( self ):
if client_size[ self._resize_option_prefix + 'maximised' ]: self.Maximize()
- if client_size[ self._resize_option_prefix + 'fullscreen' ]: wx.CallAfter( self.ShowFullScreen, True, wx.FULLSCREEN_ALL )
+ if client_size[ self._resize_option_prefix + 'fullscreen' ]: self.ShowFullScreen( True, wx.FULLSCREEN_ALL )
def _RecordSizeAndPosition( self ):
@@ -2641,6 +2641,13 @@ def TryToDismiss( self ):
def Update( self ):
+ if self._job_key.IsDeleted():
+
+ self.TryToDismiss()
+
+ return
+
+
if self._job_key.HasVariable( 'popup_message_title' ):
text = self._job_key.GetVariable( 'popup_message_title' )
@@ -3103,8 +3110,6 @@ def __init__( self, parent, height, columns ):
ListCtrlAutoWidthMixin.__init__( self )
ColumnSorterMixin.__init__( self, num_columns )
- self.GetTopLevelParent().SetDoubleBuffered( False ) # windows double buffer makes listctrls refresh and bug out
-
self.itemDataMap = {}
self._next_data_index = 0
diff --git a/include/ClientGUIDialogs.py b/include/ClientGUIDialogs.py
index a8dbfabc3..aab2e90c1 100755
--- a/include/ClientGUIDialogs.py
+++ b/include/ClientGUIDialogs.py
@@ -107,7 +107,7 @@ def __init__( self, parent, title, style = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_B
wx.Dialog.__init__( self, parent, title = title, style = style, pos = pos )
- self.SetDoubleBuffered( True )
+ #self.SetDoubleBuffered( True )
self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_BTNFACE ) )
@@ -5122,7 +5122,7 @@ def EventOK( self, event ):
url_string = title + ' ' + resolution + ' ' + extension
- job_key = HC.JobKey()
+ job_key = HC.JobKey( pausable = True, cancellable = True )
HydrusThreading.CallToThread( HydrusDownloading.THREADDownloadURL, job_key, url, url_string )
diff --git a/include/ClientGUIManagement.py b/include/ClientGUIManagement.py
index f15b0402d..115bd50ec 100755
--- a/include/ClientGUIManagement.py
+++ b/include/ClientGUIManagement.py
@@ -2265,7 +2265,7 @@ def SetSearchFocus( self, page_key ):
if page_key == self._page_key:
try: self._searchbox.SetFocus() # there's a chance this doesn't exist!
- except: pass
+ except: HC.pubsub.pub( 'set_media_focus' )
diff --git a/include/ClientGUIMedia.py b/include/ClientGUIMedia.py
index 5b93a3283..30fa02168 100755
--- a/include/ClientGUIMedia.py
+++ b/include/ClientGUIMedia.py
@@ -80,7 +80,7 @@ def __init__( self, parent, page_key, file_service_key, media_results ):
self.SetBackgroundColour( wx.WHITE )
- self.SetDoubleBuffered( True )
+ #self.SetDoubleBuffered( True )
self.SetScrollRate( 0, 50 )
@@ -1087,7 +1087,7 @@ def _DrawIndices( self, from_index, to_index ):
if hash in self._thumbnails_being_faded_in:
- ( original_bmp, alpha_bmp, canvas_bmp, x, y, num_frames_rendered ) = self._thumbnails_being_faded_in[ hash ]
+ ( original_bmp, alpha_bmp, x, y, num_frames_rendered ) = self._thumbnails_being_faded_in[ hash ]
current_row = i / self._num_columns
@@ -1096,7 +1096,7 @@ def _DrawIndices( self, from_index, to_index ):
x = current_col * thumbnail_span_width + CC.THUMBNAIL_MARGIN
y = current_row * thumbnail_span_height + CC.THUMBNAIL_MARGIN
- self._thumbnails_being_faded_in[ hash ] = ( original_bmp, alpha_bmp, canvas_bmp, x, y, num_frames_rendered )
+ self._thumbnails_being_faded_in[ hash ] = ( original_bmp, alpha_bmp, x, y, num_frames_rendered )
else:
@@ -1228,35 +1228,21 @@ def _FadeThumbnail( self, thumbnail ):
if ( x, y ) != ( -1, -1 ):
bmp = thumbnail.GetBmp()
+
+ image = bmp.ConvertToImage()
- hash = thumbnail.GetDisplayMedia().GetHash()
+ try: image.InitAlpha()
+ except: pass
- canvas_bmp = None
- '''
- ( thumbnail_span_width, thumbnail_span_height ) = self._thumbnail_span_dimensions
+ image = image.AdjustChannels( 1, 1, 1, 0.25 )
- canvas_bmp = wx.EmptyBitmap( thumbnail_span_width, thumbnail_span_height, 24 )
+ alpha_bmp = wx.BitmapFromImage( image, 32 )
- canvas_bmp_dc = wx.MemoryDC( canvas_bmp )
+ wx.CallAfter( image.Destroy )
- index = self._sorted_media.index( thumbnail )
+ hash = thumbnail.GetDisplayMedia().GetHash()
- ( from_index, to_index ) = self._drawn_index_bounds
-
- if from_index <= index and index <= to_index:
-
- big_canvas_bmp_dc = wx.MemoryDC( self._canvas_bmp )
-
- canvas_bmp_dc.Blit( 0, 0, thumbnail_span_width, thumbnail_span_height, big_canvas_bmp_dc, x, y )
-
- else:
-
- canvas_bmp_dc.SetBrush( wx.WHITE_BRUSH )
-
- canvas_bmp_dc.Clear()
-
- '''
- self._thumbnails_being_faded_in[ hash ] = ( bmp, None, canvas_bmp, x, y, 0 )
+ self._thumbnails_being_faded_in[ hash ] = ( bmp, alpha_bmp, x, y, 0 )
if not self._timer_animation.IsRunning(): self._timer_animation.Start( 1, wx.TIMER_ONE_SHOT )
@@ -1378,6 +1364,12 @@ def _RedrawCanvas( self ):
self._drawn_index_bounds = None
+ for ( original_bmp, alpha_bmp, x, y, num_frames_rendered ) in self._thumbnails_being_faded_in.values():
+
+ wx.CallAfter( original_bmp.Destroy )
+ wx.CallAfter( alpha_bmp.Destroy )
+
+
self._thumbnails_being_faded_in = {}
self._CleanCanvas()
@@ -2161,37 +2153,22 @@ def TIMEREventAnimation( self, event ):
all_info = self._thumbnails_being_faded_in.items()
- for ( hash, ( original_bmp, alpha_bmp, canvas_bmp, x, y, num_frames_rendered ) ) in all_info:
-
- if num_frames_rendered == 0:
-
- image = original_bmp.ConvertToImage()
-
- try: image.InitAlpha()
- except: pass
-
- image = image.AdjustChannels( 1, 1, 1, 0.25 )
-
- alpha_bmp = wx.BitmapFromImage( image, 32 )
-
+ for ( hash, ( original_bmp, alpha_bmp, x, y, num_frames_rendered ) ) in all_info:
num_frames_rendered += 1
- self._thumbnails_being_faded_in[ hash ] = ( original_bmp, alpha_bmp, canvas_bmp, x, y, num_frames_rendered )
+ self._thumbnails_being_faded_in[ hash ] = ( original_bmp, alpha_bmp, x, y, num_frames_rendered )
- if y < min_y or y > max_y or num_frames_rendered == 9:
+ if y < min_y or y > max_y or num_frames_rendered >= 9:
bmp_to_use = original_bmp
del self._thumbnails_being_faded_in[ hash ]
- else:
-
- #canvas_dc = wx.MemoryDC( canvas_bmp )
-
- #canvas_dc.DrawBitmap( alpha_bmp, 0, 0, True )
+ wx.CallAfter( original_bmp.Destroy )
+ wx.CallAfter( alpha_bmp.Destroy )
- #del canvas_dc
+ else:
bmp_to_use = alpha_bmp
diff --git a/include/HydrusConstants.py b/include/HydrusConstants.py
index cba71bddd..28a6b8ebe 100755
--- a/include/HydrusConstants.py
+++ b/include/HydrusConstants.py
@@ -65,7 +65,7 @@
# Misc
NETWORK_VERSION = 15
-SOFTWARE_VERSION = 139
+SOFTWARE_VERSION = 140
UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@@ -1321,7 +1321,7 @@ def ShowTextDefault( text ):
ShowText = ShowTextDefault
-def SplayListForDB( xs ): return '(' + ','.join( [ '"' + u( x ) + '"' for x in xs ] ) + ')'
+def SplayListForDB( xs ): return '(' + ','.join( ( '"' + u( x ) + '"' for x in xs ) ) + ')'
def SplayTupleListForDB( first_column_name, second_column_name, xys ): return ' OR '.join( [ '( ' + first_column_name + '=' + u( x ) + ' AND ' + second_column_name + ' IN ' + SplayListForDB( ys ) + ' )' for ( x, ys ) in xys ] )
@@ -1950,6 +1950,7 @@ def __init__( self, pausable = False, cancellable = False ):
self._pausable = pausable
self._cancellable = cancellable
+ self._deleted = threading.Event()
self._begun = threading.Event()
self._done = threading.Event()
self._cancelled = threading.Event()
@@ -1974,6 +1975,13 @@ def Cancel( self ):
self.Finish()
+ def Delete( self ):
+
+ self.Finish()
+
+ self._deleted.set()
+
+
def DeleteVariable( self, name ):
with self._variable_lock:
@@ -2002,6 +2010,8 @@ def IsCancellable( self ): return self._cancellable and not self.IsDone()
def IsCancelled( self ): return shutdown or self._cancelled.is_set()
+ def IsDeleted( self ): return shutdown or self._deleted.is_set()
+
def IsDone( self ): return shutdown or self._done.is_set()
def IsPausable( self ): return self._pausable and not self.IsDone()
@@ -2514,13 +2524,13 @@ def GetPetitionString( self ):
( old_tag, new_tag ) = self._petition_data
- content_phrase = ' sibling ' + old_tag + '->' + new_tag
+ content_phrase = 'sibling ' + old_tag + '->' + new_tag
elif self._petition_type == CONTENT_DATA_TYPE_TAG_PARENTS:
( old_tag, new_tag ) = self._petition_data
- content_phrase = ' parent ' + old_tag + '->' + new_tag
+ content_phrase = 'parent ' + old_tag + '->' + new_tag
return action_word + content_phrase + os.linesep * 2 + self._reason
diff --git a/include/HydrusNetworking.py b/include/HydrusNetworking.py
index c74a65613..b40d7e7c5 100644
--- a/include/HydrusNetworking.py
+++ b/include/HydrusNetworking.py
@@ -297,7 +297,10 @@ def _ParseResponse( self, response, report_hooks ):
raise Exception( 'Response was longer than suggested!' )
- for hook in report_hooks: hook( content_length, len( data ) )
+ for hook in report_hooks:
+
+ hook( content_length, len( data ) )
+
size_of_response = len( data )
@@ -369,7 +372,10 @@ def _WriteResponseToPath( self, response, report_hooks ):
f.write( block )
- for hook in report_hooks: hook( content_length, size_of_response )
+ for hook in report_hooks:
+
+ hook( content_length, size_of_response )
+