summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rwxr-xr-x  mat      2
-rwxr-xr-x  mat-gui  10
2 files changed, 8 insertions, 4 deletions
diff --git a/mat b/mat
index 64dccc4..0139803 100755
--- a/mat
+++ b/mat
@@ -52,7 +52,7 @@ def list_meta(class_file, filename, add2archive):
 52     print ('Harmful metadata found:')
 53     meta = class_file.get_meta()
 54     if meta:
-55         for key, value in class_file.get_meta().iteritems():
+55         for key, value in class_file.get_meta().items():
 56             print('\t%s: %s' % (key, value))
 57     return 0
 58
diff --git a/mat-gui b/mat-gui
index 430641a..feccbd2 100755
--- a/mat-gui
+++ b/mat-gui
@@ -13,7 +13,11 @@ import logging
 13 import os
 14 import sys
 15 import xml.sax
-16 import urllib2
+16
+17 try:
+18     from urllib2 import unquote
+19 except ImportError:  # python3
+20     from urllib.parse import unquote
 17/21
18from libmat import mat 22from libmat import mat
19from libmat import strippers 23from libmat import strippers
@@ -168,7 +172,7 @@ class GUI(object):
 168/172             metadataPopupListStore.append([_('No metadata found'), ''])
 169/173         else:
 170/174             self.liststore[row][2] = _('Dirty')
-171                 for i, j in self.liststore[row][0].file.get_meta().iteritems():
+    175             for i, j in self.liststore[row][0].file.get_meta().items():
 172/176                 metadataPopupListStore.append([i, j])
 173/177
 174/178         popup_metadata = self.builder.get_object('MetadataPopup')
@@ -260,7 +264,7 @@ non-anonymised) file from output archive'))
260 """ Since the dragged urls are ugly, 264 """ Since the dragged urls are ugly,
261 we need to process them 265 we need to process them
262 """ 266 """
263 url = urllib2.unquote(url) # unquote url 267 url = unquote(url) # unquote url
264 url = url.decode('utf-8') # decode in utf-8 268 url = url.decode('utf-8') # decode in utf-8
265 if url.startswith('file:\\\\\\'): # windows 269 if url.startswith('file:\\\\\\'): # windows
266 return url[8:] # 8 is len('file:///') 270 return url[8:] # 8 is len('file:///')