diff --git a/etc/utils/add_version.py b/etc/utils/add_version.py
index 8f2b2abc..e1840d21 100755
--- a/etc/utils/add_version.py
+++ b/etc/utils/add_version.py
@@ -17,6 +17,8 @@
"""
+from __future__ import absolute_import, print_function
+
import sys
import re
@@ -25,12 +27,12 @@ def find_version(path):
for line in f:
index = line.find('GMVAULT_VERSION="')
if index > -1:
- print(line[index+17:-2])
+ print(line[index+17:-2])
return line[index+17:-2]
raise Exception("Cannot find GMVAULT_VERSION in %s\n" % (path))
-VERSION_PATTERN = r'###GMVAULTVERSION###'
+VERSION_PATTERN = r'###GMVAULTVERSION###'
VERSION_RE = re.compile(VERSION_PATTERN)
def add_version(a_input, a_output, a_version):
@@ -43,7 +45,7 @@ def add_version(a_input, a_output, a_version):
if __name__ == '__main__':
if len(sys.argv) < 4:
- print("Error: need more parameters for %s." % (sys.argv[0]))
+ print("Error: need more parameters for %s." % (sys.argv[0]))
print("Usage: add_version.py input_path output_path version.")
exit(-1)
diff --git a/etc/utils/find_version.py b/etc/utils/find_version.py
index cf8bdeab..1d804623 100755
--- a/etc/utils/find_version.py
+++ b/etc/utils/find_version.py
@@ -17,6 +17,8 @@
"""
+from __future__ import absolute_import, print_function
+
import sys
@@ -25,7 +27,7 @@ def find_version(path):
for line in f:
index = line.find('GMVAULT_VERSION = "')
if index > -1:
- print(line[index+19:-2])
+ print(line[index+19:-2])
res = line[index+19:-2]
return res.strip()
diff --git a/etc/utils/flask_stats.py b/etc/utils/flask_stats.py
index 79239e10..56c8a699 100644
--- a/etc/utils/flask_stats.py
+++ b/etc/utils/flask_stats.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
from flask import Flask
import scrapping
diff --git a/etc/utils/mem-profiling-tools/dowser/__init__.py b/etc/utils/mem-profiling-tools/dowser/__init__.py
index fa562b17..2123986b 100644
--- a/etc/utils/mem-profiling-tools/dowser/__init__.py
+++ b/etc/utils/mem-profiling-tools/dowser/__init__.py
@@ -1,8 +1,9 @@
+from __future__ import absolute_import
import cgi
import gc
import os
localDir = os.path.join(os.getcwd(), os.path.dirname(__file__))
-from StringIO import StringIO
+from io import BytesIO
import sys
import threading
import time
@@ -13,9 +14,11 @@
import cherrypy
-import reftree
+from . import reftree
+from six import iteritems
+
def get_repr(obj, limit=250):
return cgi.escape(reftree.get_repr(obj, limit))
@@ -44,10 +47,10 @@ def template(name, **params):
class Root:
-
+
period = 5
maxhistory = 300
-
+
def __init__(self):
self.history = {}
self.samples = 0
@@ -55,16 +58,16 @@ def __init__(self):
cherrypy.engine.subscribe('exit', self.stop)
self.runthread = threading.Thread(target=self.start)
self.runthread.start()
-
+
def start(self):
self.running = True
while self.running:
self.tick()
time.sleep(self.period)
-
+
def tick(self):
gc.collect()
-
+
typecounts = {}
for obj in gc.get_objects():
objtype = type(obj)
@@ -72,34 +75,34 @@ def tick(self):
typecounts[objtype] += 1
else:
typecounts[objtype] = 1
-
- for objtype, count in typecounts.iteritems():
+
+ for objtype, count in iteritems(typecounts):
typename = objtype.__module__ + "." + objtype.__name__
if typename not in self.history:
self.history[typename] = [0] * self.samples
self.history[typename].append(count)
-
+
samples = self.samples + 1
-
+
# Add dummy entries for any types which no longer exist
- for typename, hist in self.history.iteritems():
+ for typename, hist in iteritems(self.history):
diff = samples - len(hist)
if diff > 0:
hist.extend([0] * diff)
-
+
# Truncate history to self.maxhistory
if samples > self.maxhistory:
- for typename, hist in self.history.iteritems():
+ for typename, hist in iteritems(self.history):
hist.pop(0)
else:
self.samples = samples
-
+
def stop(self):
self.running = False
-
+
def index(self, floor=0):
rows = []
- typenames = self.history.keys()
+ typenames = list(self.history.keys())
typenames.sort()
for typename in typenames:
hist = self.history[typename]
@@ -117,7 +120,7 @@ def index(self, floor=0):
rows.append(row)
return template("graphs.html", output="\n".join(rows))
index.exposed = True
-
+
def chart(self, typename):
"""Return a sparkline chart of the given type."""
data = self.history[typename]
@@ -128,28 +131,28 @@ def chart(self, typename):
draw.line([(i, int(height - (v * scale))) for i, v in enumerate(data)],
fill="#009900")
del draw
-
- f = StringIO()
+
+ f = BytesIO()
im.save(f, "PNG")
result = f.getvalue()
-
+
cherrypy.response.headers["Content-Type"] = "image/png"
return result
chart.exposed = True
-
+
def trace(self, typename, objid=None):
gc.collect()
-
+
if objid is None:
rows = self.trace_all(typename)
else:
rows = self.trace_one(typename, objid)
-
+
return template("trace.html", output="\n".join(rows),
typename=cgi.escape(typename),
objid=str(objid or ''))
trace.exposed = True
-
+
def trace_all(self, typename):
rows = []
for obj in gc.get_objects():
@@ -160,7 +163,7 @@ def trace_all(self, typename):
if not rows:
rows = ["
Referents (Children) ')
for child in gc.get_referents(obj):
@@ -203,10 +206,10 @@ def trace_one(self, typename, objid):
if not rows:
rows = ["
The object you requested was not found. "]
return rows
-
+
def tree(self, typename, objid):
gc.collect()
-
+
rows = []
objid = int(objid)
all_objs = gc.get_objects()
@@ -218,17 +221,17 @@ def tree(self, typename, objid):
"of the correct type."]
else:
rows.append('
')
-
+
tree = ReferrerTree(obj)
tree.ignore(all_objs)
for depth, parentid, parentrepr in tree.walk(maxresults=1000):
rows.append(parentrepr)
-
+
rows.append('
')
break
if not rows:
rows = ["
The object you requested was not found. "]
-
+
params = {'output': "\n".join(rows),
'typename': cgi.escape(typename),
'objid': str(objid),
@@ -254,17 +257,17 @@ def tree(self, typename, objid):
class ReferrerTree(reftree.Tree):
-
+
ignore_modules = True
-
+
def _gen(self, obj, depth=0):
if self.maxdepth and depth >= self.maxdepth:
yield depth, 0, "---- Max depth reached ----"
raise StopIteration
-
+
if isinstance(obj, ModuleType) and self.ignore_modules:
raise StopIteration
-
+
refs = gc.get_referrers(obj)
refiter = iter(refs)
self.ignore(refs, refiter)
@@ -274,16 +277,16 @@ def _gen(self, obj, depth=0):
if (isinstance(ref, FrameType)
and ref.f_code.co_filename in (thisfile, self.filename)):
continue
-
+
# Exclude all functions and classes from this module or reftree.
mod = getattr(ref, "__module__", "")
if "dowser" in mod or "reftree" in mod or mod == '__main__':
continue
-
+
# Exclude all parents in our ignore list.
if id(ref) in self._ignore:
continue
-
+
# Yield the (depth, id, repr) of our object.
yield depth, 0, '%s
' % (" " * depth)
if id(ref) in self.seen:
@@ -291,21 +294,21 @@ def _gen(self, obj, depth=0):
else:
self.seen[id(ref)] = None
yield depth, id(ref), self.get_repr(ref, obj)
-
+
for parent in self._gen(ref, depth + 1):
yield parent
yield depth, 0, '%s
' % (" " * depth)
-
+
def get_repr(self, obj, referent=None):
"""Return an HTML tree block describing the given object."""
objtype = type(obj)
typename = objtype.__module__ + "." + objtype.__name__
prettytype = typename.replace("__builtin__.", "")
-
+
name = getattr(obj, "__name__", "")
if name:
prettytype = "%s %r" % (prettytype, name)
-
+
key = ""
if referent:
key = self.get_refkey(obj, referent)
@@ -315,14 +318,14 @@ def get_repr(self, obj, referent=None):
% (url("/trace/%s/%s" % (typename, id(obj))),
id(obj), prettytype, key, get_repr(obj, 100))
)
-
+
def get_refkey(self, obj, referent):
"""Return the dict key or attribute name of obj which refers to referent."""
if isinstance(obj, dict):
- for k, v in obj.iteritems():
+ for k, v in iteritems(obj):
if v is referent:
return " (via its %r key)" % k
-
+
for k in dir(obj) + ['__dict__']:
if getattr(obj, k, None) is referent:
return " (via its %r attribute)" % k
diff --git a/etc/utils/mem-profiling-tools/dowser/reftree.py b/etc/utils/mem-profiling-tools/dowser/reftree.py
index d0aec28e..2e4dd042 100644
--- a/etc/utils/mem-profiling-tools/dowser/reftree.py
+++ b/etc/utils/mem-profiling-tools/dowser/reftree.py
@@ -1,36 +1,39 @@
+from __future__ import absolute_import, print_function
+
import gc
import sys
from types import FrameType
+import six
class Tree:
-
+
def __init__(self, obj):
self.obj = obj
self.filename = sys._getframe().f_code.co_filename
self._ignore = {}
-
+
def ignore(self, *objects):
for obj in objects:
self._ignore[id(obj)] = None
-
+
def ignore_caller(self):
f = sys._getframe() # = this function
cur = f.f_back # = the function that called us (probably 'walk')
self.ignore(cur, cur.f_builtins, cur.f_locals, cur.f_globals)
caller = f.f_back # = the 'real' caller
self.ignore(caller, caller.f_builtins, caller.f_locals, caller.f_globals)
-
+
def walk(self, maxresults=100, maxdepth=None):
"""Walk the object tree, ignoring duplicates and circular refs."""
self.seen = {}
self.ignore(self, self.__dict__, self.obj, self.seen, self._ignore)
-
+
# Ignore the calling frame, its builtins, globals and locals
self.ignore_caller()
-
+
self.maxdepth = maxdepth
count = 0
for result in self._gen(self.obj):
@@ -39,12 +42,12 @@ def walk(self, maxresults=100, maxdepth=None):
if maxresults and count >= maxresults:
yield 0, 0, "==== Max results reached ===="
raise StopIteration
-
+
def print_tree(self, maxresults=100, maxdepth=None):
"""Walk the object tree, pretty-printing each branch."""
self.ignore_caller()
for depth, refid, rep in self.walk(maxresults, maxdepth):
- print ("%9d" % refid), (" " * depth * 2), rep
+ print(("%9d" % refid), (" " * depth * 2), rep)
def _repr_container(obj):
@@ -64,25 +67,25 @@ def repr_frame(obj):
def get_repr(obj, limit=250):
typename = getattr(type(obj), "__name__", None)
handler = globals().get("repr_%s" % typename, repr)
-
+
try:
result = handler(obj)
except:
result = "unrepresentable object: %r" % sys.exc_info()[1]
-
+
if len(result) > limit:
result = result[:limit] + "..."
-
+
return result
class ReferentTree(Tree):
-
+
def _gen(self, obj, depth=0):
if self.maxdepth and depth >= self.maxdepth:
yield depth, 0, "---- Max depth reached ----"
raise StopIteration
-
+
for ref in gc.get_referents(obj):
if id(ref) in self._ignore:
continue
@@ -92,18 +95,18 @@ def _gen(self, obj, depth=0):
else:
self.seen[id(ref)] = None
yield depth, id(ref), get_repr(ref)
-
+
for child in self._gen(ref, depth + 1):
yield child
class ReferrerTree(Tree):
-
+
def _gen(self, obj, depth=0):
if self.maxdepth and depth >= self.maxdepth:
yield depth, 0, "---- Max depth reached ----"
raise StopIteration
-
+
refs = gc.get_referrers(obj)
refiter = iter(refs)
self.ignore(refs, refiter)
@@ -112,7 +115,7 @@ def _gen(self, obj, depth=0):
if isinstance(ref, FrameType):
if ref.f_code.co_filename == self.filename:
continue
-
+
if id(ref) in self._ignore:
continue
elif id(ref) in self.seen:
@@ -121,23 +124,23 @@ def _gen(self, obj, depth=0):
else:
self.seen[id(ref)] = None
yield depth, id(ref), get_repr(ref)
-
+
for parent in self._gen(ref, depth + 1):
yield parent
class CircularReferents(Tree):
-
+
def walk(self, maxresults=100, maxdepth=None):
"""Walk the object tree, showing circular referents."""
self.stops = 0
self.seen = {}
self.ignore(self, self.__dict__, self.seen, self._ignore)
-
+
# Ignore the calling frame, its builtins, globals and locals
self.ignore_caller()
-
+
self.maxdepth = maxdepth
count = 0
for result in self._gen(self.obj):
@@ -146,15 +149,15 @@ def walk(self, maxresults=100, maxdepth=None):
if maxresults and count >= maxresults:
yield 0, 0, "==== Max results reached ===="
raise StopIteration
-
+
def _gen(self, obj, depth=0, trail=None):
if self.maxdepth and depth >= self.maxdepth:
self.stops += 1
raise StopIteration
-
+
if trail is None:
trail = []
-
+
for ref in gc.get_referents(obj):
if id(ref) in self._ignore:
continue
@@ -162,21 +165,21 @@ def _gen(self, obj, depth=0, trail=None):
continue
else:
self.seen[id(ref)] = None
-
+
refrepr = get_repr(ref)
if id(ref) == id(self.obj):
yield trail + [refrepr,]
-
+
for child in self._gen(ref, depth + 1, trail + [refrepr,]):
yield child
-
+
def print_tree(self, maxresults=100, maxdepth=None):
"""Walk the object tree, pretty-printing each branch."""
self.ignore_caller()
for trail in self.walk(maxresults, maxdepth):
- print trail
+ print(trail)
if self.stops:
- print "%s paths stopped because max depth reached" % self.stops
+ print("%s paths stopped because max depth reached" % self.stops)
def count_objects():
@@ -184,7 +187,7 @@ def count_objects():
for obj in gc.get_objects():
objtype = type(obj)
d[objtype] = d.get(objtype, 0) + 1
- d = [(v, k) for k, v in d.iteritems()]
+ d = [(v, k) for k, v in six.iteritems(d)]
d.sort()
return d
diff --git a/etc/utils/mem-profiling-tools/memdebug.py b/etc/utils/mem-profiling-tools/memdebug.py
index bdf412ad..58baaf11 100644
--- a/etc/utils/mem-profiling-tools/memdebug.py
+++ b/etc/utils/mem-profiling-tools/memdebug.py
@@ -1,5 +1,6 @@
# memdebug.py
+from __future__ import absolute_import
import cherrypy
import dowser
diff --git a/etc/utils/scrapping.py b/etc/utils/scrapping.py
index 9593a761..7d419862 100755
--- a/etc/utils/scrapping.py
+++ b/etc/utils/scrapping.py
@@ -19,6 +19,8 @@
#quick and dirty scrapper to get number of downloads
+from __future__ import absolute_import, print_function
+
import json
import datetime
import mechanize
@@ -41,16 +43,16 @@ def get_from_bitbucket():
#body_tag = soup.body
all_tables = soup.findAll('table')
- table = soup.find(lambda tag: tag.name=='table' and tag.has_key('id') and tag['id']=="uploaded-files")
+    table = soup.find(lambda tag: tag.name=='table' and tag.get('id')=="uploaded-files")
rows = table.findAll(lambda tag: tag.name=='tr')
- res = {}
+ res = {}
for row in rows:
-
+
tds = row.findAll(lambda tag: tag.name == 'td')
- #print("tds = %s\n" %(tds))
+ #print("tds = %s\n" %(tds))
td_number = 0
name = None
@@ -73,10 +75,10 @@ def get_from_bitbucket():
return res
def get_from_pypi(url):
-
+
res = {}
- print("Get info from pypi (url= %s)\n" % (url))
+    print("Get info from pypi (url= %s)\n" % (url))
br = mechanize.Browser()
br.open(url)
@@ -93,12 +95,12 @@ def get_from_pypi(url):
#print("rows = %s\n" %(rows))
for row in rows:
-
+
tds = row.findAll(lambda tag: tag.name == 'td')
- #print("tds = %s\n" %(tds))
+ #print("tds = %s\n" %(tds))
- #ignore tds that are too small
+ #ignore tds that are too small
if len(tds) < 6:
#print("ignore td = %s\n" % (tds))
continue
@@ -142,20 +144,20 @@ def get_stats(return_type):
res.update(get_from_pypi("https://pypi.python.org/pypi/gmvault/1.8-beta"))
res.update(get_from_pypi("https://pypi.python.org/pypi/gmvault/1.7-beta"))
- #print("name , nb_downloads")
+ #print("name , nb_downloads")
total = 0
win_total = 0
lin_total = 0
mac_total = 0
v17_total = 0
- v18_total = 0
- v181_total = 0
+ v18_total = 0
+ v181_total = 0
pypi_total = 0
src_total = 0
for key in res.keys():
#print("key= %s: (%s)\n" %(key, res[key]))
if key.endswith(".exe"):
- win_total += res[key]
+ win_total += res[key]
elif "macosx" in key:
mac_total += res[key]
else:
@@ -178,11 +180,11 @@ def get_stats(return_type):
#print("%s, %s\n" % (key, res[key]))
total += res[key]
- total += TOTAL_PREVIOUS_VERSIONS
+ total += TOTAL_PREVIOUS_VERSIONS
win_total += WIN_TOTAL_PREVIOUS_VERSIONS
lin_total += LIN_TOTAL_PREVIOUS_VERSIONS
mac_total += MAC_TOTAL_PREVIOUS_VERSIONS
- pypi_total += PYPI_TOTAL_PREVIOUS_VERSIONS
+ pypi_total += PYPI_TOTAL_PREVIOUS_VERSIONS
src_total += SRC_TOTAL_PREVIOUS_VERSIONS
the_str = ""
@@ -204,6 +206,6 @@ def get_stats(return_type):
if __name__ == "__main__":
- print(get_stats("JSON"))
+    print(get_stats("JSON"))
diff --git a/setup.py b/setup.py
index f6f07f85..dcb406c5 100644
--- a/setup.py
+++ b/setup.py
@@ -16,6 +16,8 @@
along with this program. If not, see
.
"""
+from __future__ import absolute_import, print_function
+
import os
from setuptools import setup
@@ -61,11 +63,11 @@ def find_version(path):
author_email='guillaume.aubert@gmail.com',
url='http://www.gmvault.org',
license='AGPLv3',
- packages=['gmv','gmv.conf', 'gmv.conf.utils'],
- package_dir = {'gmv': './src/gmv'},
+ packages=['gmv', 'gmv.conf', 'gmv.conf.utils'],
+ package_dir={'': 'src'},
scripts=['./etc/scripts/gmvault'],
package_data={'': ['release-note.txt']},
include_package_data=True,
#install_requires=['argparse', 'Logbook==0.4.1', 'IMAPClient==0.9.2','gdata==2.0.17']
- install_requires=['argparse', 'Logbook==0.10.1', 'IMAPClient==0.13', 'chardet==2.3.0']
+ install_requires=['argparse', 'Logbook==0.10.1', 'IMAPClient==0.13', 'chardet==2.3.0', 'six']
)
diff --git a/src/gmv/blowfish.py b/src/gmv/blowfish.py
index 34602089..51e19c16 100644
--- a/src/gmv/blowfish.py
+++ b/src/gmv/blowfish.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-
+
# blowfish.py
# Copyright (C) 2002 Michael Gilfix
#
@@ -16,7 +16,7 @@
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
-
+
# This software was modified by Ivan Voras: CTR cipher mode of
# operation was added, together with testing and example code.
# These changes are (c) 2007./08. Ivan Voras
@@ -24,7 +24,7 @@
# GPL or Artistic License, the same as the original module.
# All disclaimers of warranty from the original module also
# apply to these changes.
-
+
# Further modifications by Neil Tallim to make use
# of more modern Python practises and features, improving
# performance and, in this maintainer's opinion, readability.
@@ -33,18 +33,37 @@
# June 13, 2010, subject to the terms of the original module.
"""
Blowfish Encryption
-
+
This module is a pure python implementation of Bruce Schneier's
encryption scheme 'Blowfish'. Blowish is a 16-round Feistel Network
cipher and offers substantial speed gains over DES.
-
+
The key is a string of length anywhere between 64 and 448 bits, or
equivalently 8 and 56 bytes. The encryption and decryption functions operate
on 64-bit blocks, or 8-byte strings.
"""
+from __future__ import absolute_import, print_function
+
import array
import struct
-
+from six.moves import range
+
+
+def _ensure_bytearray(s):
+ """Convert bytes to bytearrays and complain otherwise"""
+ if not isinstance(s, bytearray):
+ if not isinstance(s, bytes):
+ raise TypeError('Expected object of type bytes or bytearray, got: '
+ '{0}'.format(type(s)))
+ else:
+ s = bytearray(s)
+ return s
+
+def _chr(i):
+ """Python 2/3 compatible version of chr for byte strings"""
+ return bytes(bytearray((i,)))
+
+
class Blowfish:
"""
Implements the encryption and decryption functionality of the Blowfish
@@ -53,29 +72,31 @@ class Blowfish:
# Key restrictions
KEY_MIN_LEN = 8 #64 bits
KEY_MAX_LEN = 56 #448 bits
-
+
# Cipher directions
ENCRYPT = 0
DECRYPT = 1
-
+
# For _round()
- _MODULUS = 2L ** 32
-
+ _MODULUS = 2 ** 32
+
# CTR constants
_BLOCK_SIZE = 8
-
+
def __init__(self, key):
"""
Creates an instance of blowfish using 'key' as the encryption key.
-
+
Key is a string of bytes, used to seed calculations.
Once the instance of the object is created, the key is no longer necessary.
"""
+ key = _ensure_bytearray(key)
+
if not self.KEY_MIN_LEN <= len(key) <= self.KEY_MAX_LEN:
raise ValueError("Attempted to initialize Blowfish cipher with key of invalid length: %(len)i" % {
'len': len(key),
})
-
+
self._p_boxes = array.array('I', [
0x243F6A88, 0x85A308D3, 0x13198A2E, 0x03707344,
0xA4093822, 0x299F31D0, 0x082EFA98, 0xEC4E6C89,
@@ -83,7 +104,7 @@ def __init__(self, key):
0xC0AC29B7, 0xC97C50DD, 0x3F84D5B5, 0xB5470917,
0x9216D5D9, 0x8979FB1B
])
-
+
self._s_boxes = (
array.array('I', [
0xD1310BA6, 0x98DFB5AC, 0x2FFD72DB, 0xD01ADFB7,
@@ -350,36 +371,36 @@ def __init__(self, key):
0xB74E6132, 0xCE77E25B, 0x578FDFE3, 0x3AC372E6
])
)
-
+
# Cycle through the p-boxes and round-robin XOR the
# key with the p-boxes
key_len = len(key)
index = 0
- for i in xrange(len(self._p_boxes)):
+ for i in range(len(self._p_boxes)):
self._p_boxes[i] = self._p_boxes[i] ^ (
- (ord(key[index % key_len]) << 24) +
- (ord(key[(index + 1) % key_len]) << 16) +
- (ord(key[(index + 2) % key_len]) << 8) +
- (ord(key[(index + 3) % key_len]))
+ (key[index % key_len] << 24) +
+ (key[(index + 1) % key_len] << 16) +
+ (key[(index + 2) % key_len] << 8) +
+ (key[(index + 3) % key_len])
)
index += 4
-
+
# For the chaining process
l = r = 0
-
+
# Begin chain replacing the p-boxes
- for i in xrange(0, len(self._p_boxes), 2):
+ for i in range(0, len(self._p_boxes), 2):
(l, r) = self.cipher(l, r, self.ENCRYPT)
self._p_boxes[i] = l
self._p_boxes[i + 1] = r
-
+
# Chain replace the s-boxes
- for i in xrange(len(self._s_boxes)):
- for j in xrange(0, len(self._s_boxes[i]), 2):
+ for i in range(len(self._s_boxes)):
+ for j in range(0, len(self._s_boxes[i]), 2):
(l, r) = self.cipher(l, r, self.ENCRYPT)
self._s_boxes[i][j] = l
self._s_boxes[i][j + 1] = r
-
+
def initCTR(self, iv=0):
"""
Initializes CTR engine for encryption or decryption.
@@ -389,18 +410,18 @@ def initCTR(self, iv=0):
'target-len': self._BLOCK_SIZE,
'q-len': struct.calcsize("Q"),
})
-
+
self._ctr_iv = iv
self._calcCTRBuf()
-
+
def cipher(self, xl, xr, direction):
"""
Encrypts a 64-bit block of data where xl is the upper 32 bits and xr is
the lower 32-bits.
-
+
'direction' is the direction to apply the cipher, either ENCRYPT or
DECRYPT class-constants.
-
+
Returns a tuple of either encrypted or decrypted data of the left half
and right half of the 64-bit block.
"""
@@ -421,70 +442,75 @@ def cipher(self, xl, xr, direction):
xr = xr ^ self._p_boxes[1]
xl = xl ^ self._p_boxes[0]
return (xl, xr)
-
+
def encrypt(self, data):
"""
Encrypt an 8-byte (64-bit) block of text where 'data' is an 8 byte
string.
-
+
Returns an 8-byte encrypted string.
"""
+ data = _ensure_bytearray(data)
+
if not len(data) == 8:
raise ValueError("Attempted to encrypt data of invalid block length: %(len)i" % {
'len': len(data),
})
-
+
# Use big endianess since that's what everyone else uses
- xl = (ord(data[3])) | (ord(data[2]) << 8) | (ord(data[1]) << 16) | (ord(data[0]) << 24)
- xr = (ord(data[7])) | (ord(data[6]) << 8) | (ord(data[5]) << 16) | (ord(data[4]) << 24)
-
+ xl = (data[3]) | (data[2] << 8) | (data[1] << 16) | (data[0] << 24)
+ xr = (data[7]) | (data[6] << 8) | (data[5] << 16) | (data[4] << 24)
+
(cl, cr) = self.cipher(xl, xr, self.ENCRYPT)
- chars = ''.join ([
- chr((cl >> 24) & 0xFF), chr((cl >> 16) & 0xFF), chr((cl >> 8) & 0xFF), chr(cl & 0xFF),
- chr((cr >> 24) & 0xFF), chr((cr >> 16) & 0xFF), chr((cr >> 8) & 0xFF), chr(cr & 0xFF)
+ chars = b''.join ([
+ _chr((cl >> 24) & 0xFF), _chr((cl >> 16) & 0xFF), _chr((cl >> 8) & 0xFF), _chr(cl & 0xFF),
+ _chr((cr >> 24) & 0xFF), _chr((cr >> 16) & 0xFF), _chr((cr >> 8) & 0xFF), _chr(cr & 0xFF)
])
return chars
-
+
def decrypt(self, data):
"""
Decrypt an 8 byte (64-bit) encrypted block of text, where 'data' is the
8-byte encrypted string.
-
+
Returns an 8-byte string of plaintext.
"""
+ data = _ensure_bytearray(data)
+
if not len(data) == 8:
raise ValueError("Attempted to encrypt data of invalid block length: %(len)i" % {
'len': len(data),
})
-
+
# Use big endianess since that's what everyone else uses
- cl = (ord(data[3])) | (ord(data[2]) << 8) | (ord(data[1]) << 16) | (ord(data[0]) << 24)
- cr = (ord(data[7])) | (ord(data[6]) << 8) | (ord(data[5]) << 16) | (ord(data[4]) << 24)
-
+ cl = (data[3]) | (data[2] << 8) | (data[1] << 16) | (data[0] << 24)
+ cr = (data[7]) | (data[6] << 8) | (data[5] << 16) | (data[4] << 24)
+
(xl, xr) = self.cipher (cl, cr, self.DECRYPT)
- return ''.join ([
- chr((xl >> 24) & 0xFF), chr((xl >> 16) & 0xFF), chr((xl >> 8) & 0xFF), chr(xl & 0xFF),
- chr((xr >> 24) & 0xFF), chr((xr >> 16) & 0xFF), chr((xr >> 8) & 0xFF), chr(xr & 0xFF)
+ return b''.join ([
+ _chr((xl >> 24) & 0xFF), _chr((xl >> 16) & 0xFF), _chr((xl >> 8) & 0xFF), _chr(xl & 0xFF),
+ _chr((xr >> 24) & 0xFF), _chr((xr >> 16) & 0xFF), _chr((xr >> 8) & 0xFF), _chr(xr & 0xFF)
])
-
+
def encryptCTR(self, data):
"""
Encrypts an arbitrary string and returns the encrypted string.
-
+
This method can be called successively for multiple string blocks.
"""
- if not type(data) is str:
+ data = _ensure_bytearray(data)
+ if not isinstance(data, bytearray):
raise TypeError("Only 8-bit strings are supported")
-
- return ''.join([chr(ord(ch) ^ self._nextCTRByte()) for ch in data])
-
+
+ return b''.join([_chr(ch ^ self._nextCTRByte()) for ch in data])
+
def decryptCTR(self, data):
"""
Decrypts a string encrypted with encryptCTR() and returns the original
string.
"""
return self.encryptCTR(data)
-
+
def _calcCTRBuf(self):
"""
Calculates one block of CTR keystream.
@@ -492,80 +518,80 @@ def _calcCTRBuf(self):
self._ctr_cks = self.encrypt(struct.pack("Q", self._ctr_iv)) # keystream block
self._ctr_iv += 1
self._ctr_pos = 0
-
+
def _nextCTRByte(self):
"""
Returns one byte of CTR keystream.
"""
- b = ord(self._ctr_cks[self._ctr_pos])
+ b = self._ctr_cks[self._ctr_pos]
self._ctr_pos += 1
-
+
if self._ctr_pos >= len(self._ctr_cks):
self._calcCTRBuf()
return b
-
+
def _round(self, xl):
"""
Performs an obscuring function on the 32-bit block of data, 'xl', which
is the left half of the 64-bit block of data.
-
+
Returns the 32-bit result as a long integer.
"""
# Perform all ops as longs then and out the last 32-bits to
# obtain the integer
- f = long(self._s_boxes[0][(xl & 0xFF000000) >> 24])
- f += long(self._s_boxes[1][(xl & 0x00FF0000) >> 16])
+ f = int(self._s_boxes[0][(xl & 0xFF000000) >> 24])
+ f += int(self._s_boxes[1][(xl & 0x00FF0000) >> 16])
f %= self._MODULUS
- f ^= long(self._s_boxes[2][(xl & 0x0000FF00) >> 8])
- f += long(self._s_boxes[3][(xl & 0x000000FF)])
+ f ^= int(self._s_boxes[2][(xl & 0x0000FF00) >> 8])
+ f += int(self._s_boxes[3][(xl & 0x000000FF)])
f %= self._MODULUS
return f & 0xFFFFFFFF
-
+
# Sample usage
##############
if __name__ == '__main__':
import time
-
+
def _demo(heading, source, encrypted, decrypted):
"""demo method """
- print heading
- print "\tSource: %(source)s" % {
+ print(heading)
+ print("\tSource: %(source)s" % {
'source': source,
- }
- print "\tEncrypted: %(encrypted)s" % {
+ })
+ print("\tEncrypted: %(encrypted)s" % {
'encrypted': encrypted,
- }
- print "\tDecrypted: %(decrypted)s" % {
+ })
+ print("\tDecrypted: %(decrypted)s" % {
'decrypted': decrypted,
- }
- print
-
- key = 'This is a test key'
+ })
+ print()
+
+ key = b'This is a test key'
cipher = Blowfish(key)
-
+
# Encryption processing
- (xl, xr) = (123456L, 654321L)
+ (xl, xr) = (123456, 654321)
(cl, cr) = cipher.cipher(xl, xr, cipher.ENCRYPT)
(dl, dr) = cipher.cipher(cl, cr, cipher.DECRYPT)
_demo("Testing encryption", (xl, xr), (cl, cr), (dl, dr))
-
+
# Block processing
- text = 'testtest'
+ text = b'testtest'
crypted = cipher.encrypt(text)
decrypted = cipher.decrypt(crypted)
_demo("Testing block encrypt", text, repr(crypted), decrypted)
-
+
# CTR ptocessing
cipher.initCTR()
- text = "The quick brown fox jumps over the lazy dog"
+ text = b"The quick brown fox jumps over the lazy dog"
crypted = cipher.encryptCTR(text)
cipher.initCTR()
decrypted = cipher.decryptCTR(crypted)
_demo("Testing CTR logic", text, repr(crypted), decrypted)
-
+
# Test speed
- print "Testing speed"
- test_strings = [''.join(("The quick brown fox jumps over the lazy dog", str(i),)) for i in xrange(1000)]
+ print("Testing speed")
+    test_strings = [b''.join((b"The quick brown fox jumps over the lazy dog", str(i).encode(),)) for i in range(1000)]
n = 0
t1 = time.time()
while True:
@@ -575,8 +601,8 @@ def _demo(heading, source, encrypted, decrypted):
t2 = time.time()
if t2 - t1 >= 5.0:
break
- print "%(count)i encryptions in %(time)0.1f seconds: %(throughput)0.1f enc/s" % {
+ print("%(count)i encryptions in %(time)0.1f seconds: %(throughput)0.1f enc/s" % {
'count': n,
'time': t2 - t1,
'throughput': n / (t2 - t1),
- }
+ })
diff --git a/src/gmv/cmdline_utils.py b/src/gmv/cmdline_utils.py
index e195cd76..95d9e911 100755
--- a/src/gmv/cmdline_utils.py
+++ b/src/gmv/cmdline_utils.py
@@ -17,39 +17,41 @@
'''
+from __future__ import absolute_import, print_function
+
import argparse
import sys
-import gmv.log_utils as log_utils
+from . import log_utils
LOG = log_utils.LoggerFactory.get_logger('cmdline_utils')
class CmdLineParser(argparse.ArgumentParser): #pylint: disable=R0904
- """
+ """
Added service to OptionParser.
-
- Comments regarding usability of the lib.
+
+ Comments regarding usability of the lib.
By default you want to print the default in the help if you had them so the default formatter should print them
Also new lines are eaten in the epilogue strings. You would use an epilogue to show examples most of the time so you
- want to have the possiblity to go to a new line. There should be a way to format the epilogue differently from the rest
+ want to have the possiblity to go to a new line. There should be a way to format the epilogue differently from the rest
+
+ """
- """
-
BOOL_TRUE = ['yes', 'true', '1']
BOOL_FALSE = ['no', 'false', '0']
BOOL_VALS = BOOL_TRUE + BOOL_FALSE
-
- def __init__(self, *args, **kwargs):
- """ constructor """
+
+ def __init__(self, *args, **kwargs):
+ """ constructor """
argparse.ArgumentParser.__init__(self, *args, **kwargs) #pylint: disable=W0142
-
- # I like my help option message better than the default...
- #self.remove_option('-h')
- #self.add_option('-h', '--help', action='help', help='Show this message and exit.')
-
- self.epilogue = None
-
- @classmethod
+
+ # I like my help option message better than the default...
+ #self.remove_option('-h')
+ #self.add_option('-h', '--help', action='help', help='Show this message and exit.')
+
+ self.epilogue = None
+
+ @classmethod
def convert_to_boolean(cls, val):
"""
Convert yes, True, true, YES to boolean True and
@@ -62,65 +64,65 @@ def convert_to_boolean(cls, val):
return False
else:
raise Exception("val %s should be in %s to be convertible to a boolean." % (val, cls.BOOL_VALS))
-
- def print_help(self, out=sys.stderr):
- """
- Print the help message, followed by the epilogue (if set), to the
- specified output file. You can define an epilogue by setting the
- ``epilogue`` field.
-
+
+ def print_help(self, out=sys.stderr):
+ """
+ Print the help message, followed by the epilogue (if set), to the
+ specified output file. You can define an epilogue by setting the
+ ``epilogue`` field.
+
:param out: file desc where to write the usage message
-
- """
+
+ """
super(CmdLineParser, self).print_help(out)
- if self.epilogue:
- #print >> out, '\n%s' % textwrap.fill(self.epilogue, 100, replace_whitespace = False)
- print >> out, '\n%s' % self.epilogue
- out.flush()
-
- def show_usage(self, msg=None):
+ if self.epilogue:
+ #print >> out, '\n%s' % textwrap.fill(self.epilogue, 100, replace_whitespace = False)
+ print('\n%s' % self.epilogue, file=out)
+ out.flush()
+
+ def show_usage(self, msg=None):
"""
- Print usage message
+ Print usage message
+ """
+ self.die_with_usage(msg)
+
+ def die_with_usage(self, msg=None, exit_code=2):
+ """
+ Display a usage message and exit.
+
+ :Parameters:
+ msg : str
+ If not set to ``None`` (the default), this message will be
+ displayed before the usage message
+
+ exit_code : int
+ The process exit code. Defaults to 2.
+ """
+ if msg != None:
+ print(msg, file=sys.stderr)
+
+ self.print_help(sys.stderr)
+ sys.exit(exit_code)
+
+ def error(self, msg):
+ """
+ Overrides parent ``OptionParser`` class's ``error()`` method and
+ forces the full usage message on error.
"""
- self.die_with_usage(msg)
-
- def die_with_usage(self, msg=None, exit_code=2):
- """
- Display a usage message and exit.
-
- :Parameters:
- msg : str
- If not set to ``None`` (the default), this message will be
- displayed before the usage message
-
- exit_code : int
- The process exit code. Defaults to 2.
- """
- if msg != None:
- print >> sys.stderr, msg
-
- self.print_help(sys.stderr)
- sys.exit(exit_code)
-
- def error(self, msg):
- """
- Overrides parent ``OptionParser`` class's ``error()`` method and
- forces the full usage message on error.
- """
self.die_with_usage("%s: error: %s\n" % (self.prog, msg))
-
+
def message(self, msg):
"""
- Print a message
+ Print a message
"""
- print("%s: %s\n" % (self.prog, msg))
-
-
+        print("%s: %s\n" % (self.prog, msg))
+
+
SYNC_HELP_EPILOGUE = """Examples:
a) full synchronisation with email and password login
-#> gmvault --email foo.bar@gmail.com --passwd vrysecrtpasswd
+#> gmvault --email foo.bar@gmail.com --passwd vrysecrtpasswd
b) full synchronisation for german users that have to use googlemail instead of gmail
@@ -128,7 +130,7 @@ def message(self, msg):
c) restrict synchronisation with an IMAP request
-#> gmvault --imap-request 'Since 1-Nov-2011 Before 10-Nov-2011' --email foo.bar@gmail.com --passwd sosecrtpasswd
+#> gmvault --imap-request 'Since 1-Nov-2011 Before 10-Nov-2011' --email foo.bar@gmail.com --passwd sosecrtpasswd
"""
@@ -137,12 +139,12 @@ def test_command_parser():
Test the command parser
"""
#parser = argparse.ArgumentParser()
-
-
+
+
parser = CmdLineParser()
-
+
subparsers = parser.add_subparsers(help='commands')
-
+
# A sync command
sync_parser = subparsers.add_parser('sync', formatter_class=argparse.ArgumentDefaultsHelpFormatter, \
help='synchronize with given gmail account')
@@ -150,65 +152,65 @@ def test_command_parser():
sync_parser.add_argument('-l', '--email', action='store', dest='email', help='email to sync with')
# sync typ
sync_parser.add_argument('-t', '--type', action='store', default='full-sync', help='type of synchronisation')
-
+
sync_parser.add_argument("-i", "--imap-server", metavar = "HOSTNAME", \
help="Gmail imap server hostname. (default: imap.gmail.com)",\
dest="host", default="imap.gmail.com")
-
+
sync_parser.add_argument("-p", "--imap-port", metavar = "PORT", \
help="Gmail imap server port. (default: 993)",\
dest="port", default=993)
-
+
sync_parser.set_defaults(verb='sync')
-
+
sync_parser.epilogue = SYNC_HELP_EPILOGUE
-
+
# A restore command
restore_parser = subparsers.add_parser('restore', help='restore email to a given email account')
restore_parser.add_argument('email', action='store', help='email to sync with')
restore_parser.add_argument('--recursive', '-r', default=False, action='store_true',
help='Remove the contents of the directory, too',
)
-
+
restore_parser.set_defaults(verb='restore')
-
+
# A config command
config_parser = subparsers.add_parser('config', help='add/delete/modify properties in configuration')
config_parser.add_argument('dirname', action='store', help='New directory to create')
config_parser.add_argument('--read-only', default=False, action='store_true',
help='Set permissions to prevent writing to the directory',
)
-
+
config_parser.set_defaults(verb='config')
-
-
-
-
+
+
+
+
# global help
#print("================ Global Help (-h)================")
sys.argv = ['gmvault.py']
- print(parser.parse_args())
-
+ print((parser.parse_args()))
+
#print("================ Global Help (--help)================")
#sys.argv = ['gmvault.py', '--help']
#print(parser.parse_args())
-
+
#print("================ Sync Help (--help)================")
#sys.argv = ['gmvault.py', 'sync', '-h']
#print(parser.parse_args())
-
+
#sys.argv = ['gmvault.py', 'sync', 'guillaume.aubert@gmail.com', '--type', 'quick-sync']
-
+
#print(parser.parse_args())
#print("options = %s\n" % (options))
#print("args = %s\n" % (args))
-
+
if __name__ == '__main__':
-
+
test_command_parser()
-
-
+
+
diff --git a/src/gmv/collections_utils.py b/src/gmv/collections_utils.py
index 91d1210b..78a9faf4 100755
--- a/src/gmv/collections_utils.py
+++ b/src/gmv/collections_utils.py
@@ -16,7 +16,9 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import
import collections
+from six.moves import map
## {{{ http://code.activestate.com/recipes/576669/ (r18)
class OrderedDict(dict, collections.MutableMapping):
@@ -91,34 +93,34 @@ class Map(object):
specific multimaps are subclassed. """
def __init__(self):
self._dict = {}
-
+
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, repr(self._dict))
-
+
__str__ = __repr__
-
+
def __getitem__(self, key):
return self._dict[key]
-
+
def __setitem__(self, key, value):
self._dict[key] = value
-
+
def __delitem__(self, key):
del self._dict[key]
def __len__(self):
return len(self._dict)
-
+
def remove(self, key, value): #pylint: disable=W0613
'''remove key from Map'''
del self._dict[key]
-
+
def keys(self):
'''returns list of keys'''
- return self._dict.keys()
-
+ return list(self._dict.keys())
+
def dict(self):
- """ Allows access to internal dictionary, if necessary. Caution: multimaps
+ """ Allows access to internal dictionary, if necessary. Caution: multimaps
will break if keys are not associated with proper container."""
return self._dict
@@ -127,13 +129,13 @@ class ListMultimap(Map):
def __init__(self):
super(ListMultimap, self).__init__()
self._dict = collections.defaultdict(list)
-
+
def __setitem__(self, key, value):
self._dict[key].append(value)
def __len__(self):
return len(self._dict)
-
+
def remove(self, key, value):
'''Remove key'''
self._dict[key].remove(value)
@@ -143,13 +145,13 @@ class SetMultimap(Map):
def __init__(self):
super(SetMultimap, self).__init__()
self._dict = collections.defaultdict(set)
-
+
def __setitem__(self, key, value):
self._dict[key].add(value)
def __len__(self):
return len(self._dict)
-
+
def remove(self, key, value):
'''remove key'''
self._dict[key].remove(value)
@@ -159,13 +161,13 @@ class DictMultimap(Map):
def __init__(self):
super(DictMultimap, self).__init__()
self._dict = collections.defaultdict(dict)
-
+
def __setitem__(self, key, value):
self._dict[key][value] = True
def __len__(self):
return len(self._dict)
-
+
def remove(self, key, value):
""" remove key"""
del self._dict[key][value]
diff --git a/src/gmv/conf/conf_helper.py b/src/gmv/conf/conf_helper.py
index 7efc79a2..9cf04e29 100755
--- a/src/gmv/conf/conf_helper.py
+++ b/src/gmv/conf/conf_helper.py
@@ -16,87 +16,89 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import, print_function
+
import sys
import os
import re
import codecs
import gmv.conf.exceptions as exceptions
-import gmv.conf.utils.struct_parser as struct_parser
+import gmv.conf.utils.struct_parser as struct_parser
class ResourceError(Exception):
"""
- Base class for ressource exceptions
+    Base class for resource exceptions
"""
def __init__(self, a_msg):
-
+
super(ResourceError, self).__init__(a_msg)
class Resource(object):
"""
Class read a ressource.
- It can be read first from the Command Line, then from the ENV as an env variable and finally from a conf file
+ It can be read first from the Command Line, then from the ENV as an env variable and finally from a conf file
"""
-
- def __init__(self, a_cli_argument=None, a_env_variable=None, a_conf_property=None):
- """
+
+ def __init__(self, a_cli_argument=None, a_env_variable=None, a_conf_property=None):
+ """
Default Constructor.
It is important to understand that there is precedence between the different ways to set the ressource:
- get from the command line if defined otherwise get from the Env variable if defined otherwise get from the conf file otherwise error
-
+
Args:
a_cli_argument : The command line argument name
a_env_variable : The env variable name used for this ressource
a_conf_property: It should be a tuple containing two elements (group,property)
"""
-
+
self._cli_arg = a_cli_argument.lower() if a_cli_argument is not None else None
self._env_var = a_env_variable.upper() if a_env_variable is not None else None
-
+
if a_conf_property is not None:
(self._conf_group, self._conf_property) = a_conf_property
else:
self._conf_group = None
self._conf_property = None
-
+
def set_cli_argument(self, a_cli_argument):
"""cli_argument setter"""
self._cli_arg = a_cli_argument.lower()
-
+
def set_env_variable(self, a_env_variable):
"""env_variable setter"""
self._env_var = a_env_variable
-
+
@classmethod
def _get_srandardized_cli_argument(cls, a_tostrip):
"""
- remove -- or - from the command line argument and add a -- prefix to standardize the cli argument
+ remove -- or - from the command line argument and add a -- prefix to standardize the cli argument
"""
the_str = a_tostrip
-
+
while the_str.startswith('-'):
the_str = the_str[1:]
-
+
return '--%s' % (the_str)
-
+
def _get_value_from_command_line(self):
"""
internal method for extracting the value from the command line.
All command line agruments must be lower case (unix style).
To Do support short and long cli args.
-
+
Returns:
the Value if defined otherwise None
"""
-
+
# check precondition
if self._cli_arg == None:
return None
-
+
the_s = Resource._get_srandardized_cli_argument(self._cli_arg)
-
+
# look for cliArg in sys argv
for arg in sys.argv:
if arg.lower() == the_s:
@@ -104,28 +106,28 @@ def _get_value_from_command_line(self):
#print "i = %d, val = %s\n"%(i,sys.argv[i])
if len(sys.argv) <= i:
# No more thing to read in the command line so quit
- print "Resource: Commandline argument %s has no value\n" % (self._cli_arg)
- return None
+ print("Resource: Commandline argument %s has no value\n" % (self._cli_arg))
+ return None
else:
#print "i+1 = %d, val = %s\n"%(i+1,sys.argv[i+1])
return sys.argv[i+1]
-
+
def _get_value_from_env(self):
"""
internal method for extracting the value from the env.
All support ENV Variables should be in uppercase.
-
+
Returns:
the Value if defined otherwise None
"""
-
+
# precondition
if self._env_var == None:
return None
-
+
return os.environ.get(self._env_var, None)
-
+
def _get_from_conf(self):
"""
Try to read the info from the Configuration if possible
@@ -133,24 +135,24 @@ def _get_from_conf(self):
if self._conf_group and self._conf_property:
if Conf.can_be_instanciated():
return Conf.get_instance().get(self._conf_group, self._conf_property)
-
+
return None
-
-
+
+
def get_value(self, a_raise_exception=True):
"""
Return the value of the Resource as a string.
- get from the command line if defined otherwise get from the Env variable if defined otherwise get from the conf file otherwise error
-
+
Arguments:
aRaiseException: flag indicating if an exception should be raise if value not found
Returns:
value of the Resource as a String
-
+
Raises:
exception CTBTOError if the aRaiseExceptionOnError flag is activated
"""
-
+
# get a value using precedence rule 1) command-line, 2) ENV, 3) Conf
val = self._get_value_from_command_line()
if val is None:
@@ -158,47 +160,47 @@ def get_value(self, a_raise_exception=True):
if val is None:
val = self._get_from_conf()
if (val is None) and a_raise_exception:
-
+
the_str = "Cannot find "
add_nor = 0
-
+
if self._cli_arg is not None:
the_str += "commandline argument %s" % (self._cli_arg)
add_nor += 1
-
+
if self._env_var is not None:
-
+
if add_nor > 0:
the_str += ", nor "
-
+
the_str += "the Env Variable %s" % (self._env_var)
add_nor += 1
-
+
if self._conf_group is not None:
if add_nor > 0:
the_str += ", nor "
-
+
the_str += "the Conf Group:[%s] and Property=%s" % (self._conf_group, self._conf_property)
add_nor += 1
-
+
if add_nor == 0:
the_str += " any defined commandline argument, nor any env variable or"\
" Conf group and properties. They are all None, fatal error"
else:
the_str += ". One of them should be defined"
-
+
raise ResourceError(the_str)
-
+
return val
-
+
def _get(self, conv):
"""
Private _get method used to convert to the right expected type (int,float or boolean).
Strongly inspired by ConfigParser.py
-
+
Returns:
value converted into the asked type
-
+
Raises:
exception ValueError if conversion issue
"""
@@ -207,10 +209,10 @@ def _get(self, conv):
def get_value_as_int(self):
"""
Return the value as an int
-
+
Returns:
value converted into the asked type
-
+
Raises:
exception ValueError if conversion issue
"""
@@ -219,10 +221,10 @@ def get_value_as_int(self):
def get_value_as_float(self):
"""
Return the value as a float
-
+
Returns:
value converted into the asked type
-
+
Raises:
exception ValueError if conversion issue
"""
@@ -234,16 +236,16 @@ def get_value_as_float(self):
def get_value_as_boolean(self):
"""
Return the value as a boolean
-
+
Returns:
value converted into the asked type
-
+
Raises:
exception ValueError if conversion issue
"""
val = self.get_value()
if val.lower() not in self._boolean_states:
- raise ValueError, 'Not a boolean: %s' % val
+ raise ValueError('Not a boolean: %s' % val)
return self._boolean_states[val.lower()]
class MockConf(object):
@@ -255,36 +257,36 @@ def __init__(self, use_resource=True):
default constructor
"""
pass
-
+
@classmethod
def get(cls, section, option, default=None, fail_if_missing=False): #pylint: disable=W0613
""" get one option from a section.
"""
return default
-
+
@classmethod
def print_content(cls, substitute_values = True):#pylint: disable=W0613
""" print all the options variables substituted.
-
+
:param a_substitue_vals: bool for substituting values
:returns: the string containing all sections and variables
"""
- raise exceptions.Error("Not implemented in MockupConf")
+ raise exceptions.Error("Not implemented in MockupConf")
@classmethod
def items(cls, section):#pylint: disable=W0613
""" return all items from a section. Items is a list of tuples (option,value)
-
+
Args:
section. The section where to find the option
-
+
Returns: a list of tuples (option,value)
-
+
Raises:
exception NoSectionError if the section cannot be found
"""
- raise exceptions.Error("Not implemented in MockupConf")
-
+ raise exceptions.Error("Not implemented in MockupConf")
+
@classmethod
def getint(cls, section, option, default=0, fail_if_missing=False):#pylint: disable=W0613
"""Return the int value of the option.
@@ -293,7 +295,7 @@ def getint(cls, section, option, default=0, fail_if_missing=False):#pylint: disa
@classmethod
def getfloat(cls, section, option, default=0, fail_if_missing=False):#pylint: disable=W0613
- """Return the float value of the option.
+ """Return the float value of the option.
Default value is 0, None value can't be used as default value"""
return default
@@ -301,12 +303,12 @@ def getfloat(cls, section, option, default=0, fail_if_missing=False):#pylint: di
def getboolean(cls, section, option, default=False, fail_if_missing=False):#pylint: disable=W0613
"""get bool value """
return default
-
+
@classmethod
def get_list(cls, section, option, default=None, fail_if_missing=False):#pylint: disable=W0613
""" get a list of string, int """
return default
-
+
@classmethod
def getlist(cls, section, option, default=None, fail_if_missing=False):#pylint: disable=W0613
""" Deprecated, use get_list instead"""
@@ -316,89 +318,89 @@ def getlist(cls, section, option, default=None, fail_if_missing=False):#pylint:
def getdict(cls, section, option, default=None, fail_if_missing=False):#pylint: disable=W0613
""" Deprecated, use get_dict instead"""
return cls.get_dict(section, option, default, fail_if_missing)
-
-
+
+
@classmethod
def get_dict(cls, section, option, default=None, fail_if_missing=False):#pylint: disable=W0613
""" get a dict """
return default
-
+
class Conf(object):
""" Configuration Object with a several features:
-
+
* get configuration info in different types
* support for import
* support for variables in configuration file
* support for default values in all accessors
* integrated with the resources object offering to get the configuration from an env var, a commandline option or the conf
- * to be done : support for blocs, list comprehension and dict comprehension, json
+        * to be done : support for blocks, list comprehension and dict comprehension, json
* to be done : define resources in the conf using the [Resource] group with A= { ENV:TESTVAR, CLI:--testvar, VAL:1.234 }
-
+
"""
# command line and env resource stuff
CLINAME = "--conf_file"
- ENVNAME = "CONF_FILE"
-
+ ENVNAME = "CONF_FILE"
+
#class member
_instance = None
-
+
_CLIGROUP = "CLI"
_ENVGROUP = "ENV"
_MAX_INCLUDE_DEPTH = 10
-
+
@classmethod
def get_instance(cls):
""" singleton method """
if cls._instance == None:
cls._instance = Conf()
return cls._instance
-
+
@classmethod
def can_be_instanciated(cls):
- """Class method used by the Resource to check that the Conf can be instantiated.
-
- These two objects have a special contract as they are strongly coupled.
+ """Class method used by the Resource to check that the Conf can be instantiated.
+
+ These two objects have a special contract as they are strongly coupled.
A Resource can use the Conf to check for a Resource and the Conf uses a Resource to read Conf filepath.
-
+
:returns: True if the Conf file has got a file.
-
+
:except Error: Base Conf Error
-
+
"""
#No conf info passed to the resource so the Resource will not look into the conf (to avoid recursive search)
the_res = Resource(cls.CLINAME, cls.ENVNAME)
-
+
filepath = the_res.get_value(a_raise_exception=False)
-
+
if (filepath is not None) and os.path.exists(filepath):
return True
-
+
return False
-
-
+
+
def __init__(self, use_resource=True):
"""
Constructor
"""
-
+
# create resource for the conf file
self._conf_resource = Resource(Conf.CLINAME, Conf.ENVNAME)
-
+
# list of sections
self._sections = {}
-
+
self._configuration_file_path = None
-
- # create config object
- if use_resource:
+
+ # create config object
+ if use_resource:
self._load_config()
def _load_config(self, a_file=None):
""" _load the configuration file """
- try:
+ try:
# get it from a Resource if not files are passed
if a_file is None:
- a_file = self._conf_resource.get_value()
+ a_file = self._conf_resource.get_value()
if a_file is None:
raise exceptions.Error("Conf. Error, need a configuration file path")
@@ -409,29 +411,29 @@ def _load_config(self, a_file=None):
# memorize conf file path
self._configuration_file_path = a_file
- except Exception, exce:
- print "Can't read the config file %s" % a_file
- print "Current executing from dir = %s\n" % os.getcwd()
+ except Exception as exce:
+ print("Can't read the config file %s" % a_file)
+ print("Current executing from dir = %s\n" % os.getcwd())
raise exce
def get_conf_file_path(self):
"""return conf_file_path"""
return self._configuration_file_path if self._configuration_file_path != None else "unknown"
-
+
def sections(self):
"""Return a list of section names, excluding [DEFAULT]"""
# self._sections will never have [DEFAULT] in it
- return self._sections.keys()
-
+ return list(self._sections.keys())
+
@classmethod
def _get_defaults(cls, section, option, default, fail_if_missing):
""" To manage defaults.
Args:
default. The default value to return if fail_if_missing is False
fail_if_missing. Throw an exception when the option is not found and fail_if_missing is true
-
+
Returns: default if fail_if_missing is False
-
+
Raises:
exception NoOptionError if fail_if_missing is True
"""
@@ -442,26 +444,26 @@ def _get_defaults(cls, section, option, default, fail_if_missing):
return str(default)
else:
return None
-
+
def get(self, section, option, default=None, fail_if_missing=False):
""" get one option from a section.
-
+
return the default if it is not found and if fail_if_missing is False, otherwise return NoOptionError
-
+
:param section: Section where to find the option
:type section: str
:param option: Option to get
:param default: Default value to return if fail_if_missing is False
:param fail_if_missing: Will throw an exception when the option is not found and fail_if_missing is true
-
+
:returns: the option as a string
-
+
:except NoOptionError: Raised only when fail_is_missing set to True
-
+
"""
# all options are kept in lowercase
opt = self.optionxform(option)
-
+
if section not in self._sections:
#check if it is a ENV section
dummy = None
@@ -477,17 +479,17 @@ def get(self, section, option, default=None, fail_if_missing=False):
return self._replace_vars(self._sections[section][opt], "%s[%s]" % (section, option), - 1)
else:
return self._get_defaults(section, opt, default, fail_if_missing)
-
-
+
+
def print_content(self, substitute_values = True):
""" print all the options variables substituted.
-
+
:param a_substitue_vals: bool for substituting values
:returns: the string containing all sections and variables
"""
-
+
result_str = ""
-
+
for section_name in self._sections:
result_str += "[%s]\n" % (section_name)
section = self._sections[section_name]
@@ -497,20 +499,20 @@ def print_content(self, substitute_values = True):
result_str += "%s = %s\n" % (option, self.get(section_name, option))
else:
result_str += "%s = %s\n" % (option, self._sections[section_name][option])
-
+
result_str += "\n"
-
+
return result_str
-
+
def items(self, section):
""" return all items from a section. Items is a list of tuples (option,value)
-
+
Args:
section. The section where to find the option
-
+
Returns: a list of tuples (option,value)
-
+
Raises:
exception NoSectionError if the section cannot be found
"""
@@ -521,9 +523,9 @@ def items(self, section):
# remove __name__ from d
if "__name__" in a_copy:
del a_copy["__name__"]
-
- return a_copy.items()
-
+
+ return list(a_copy.items())
+
except KeyError:
raise exceptions.NoSectionError(section)
@@ -534,35 +536,35 @@ def has_option(self, section, option):
option = self.optionxform(option)
has_option = (option in self._sections[section])
return has_option
-
+
def has_section(self, section):
"""Check for the existence of a given section in the configuration."""
has_section = False
if section in self._sections:
has_section = True
return has_section
-
+
@classmethod
def _get_closing_bracket_index(cls, index, the_str, location, lineno):
""" private method used by _replace_vars to count the closing brackets.
-
+
Args:
index. The index from where to look for a closing bracket
s. The string to parse
group. group and options that are substituted. Mainly used to create a nice exception message
option. option that is substituted. Mainly used to create a nice exception message
-
+
Returns: the index of the found closing bracket
-
+
Raises:
exception NoSectionError if the section cannot be found
"""
-
+
tolook = the_str[index + 2:]
-
+
opening_brack = 1
closing_brack_index = index + 2
-
+
i = 0
for _ch in tolook:
if _ch == ')':
@@ -570,63 +572,63 @@ def _get_closing_bracket_index(cls, index, the_str, location, lineno):
return closing_brack_index
else:
opening_brack -= 1
-
+
elif _ch == '(':
if tolook[i - 1] == '%':
opening_brack += 1
-
+
# inc index
closing_brack_index += 1
i += 1
-
+
raise exceptions.SubstitutionError(lineno, location, "Missing a closing bracket in %s" % (tolook))
# very permissive regex
_SUBSGROUPRE = re.compile(r"%\((?P\w*)\[(?P(.*))\]\)")
-
+
def _replace_vars(self, a_str, location, lineno= - 1):
""" private replacing all variables. A variable will be in the from of %(group[option]).
Multiple variables are supported, ex /foo/%(group1[opt1])/%(group2[opt2])/bar
Nested variables are also supported, ex /foo/%(group[%(group1[opt1]].
Note that the group part cannot be substituted, only the option can. This is because of the Regular Expression _SUBSGROUPRE that accepts only words as values.
-
+
Args:
index. The index from where to look for a closing bracket
s. The string to parse
-
+
Returns: the final string with the replacements
-
+
Raises:
exception NoSectionError if the section cannot be found
"""
-
+
toparse = a_str
-
+
index = toparse.find("%(")
-
+
# if found opening %( look for end bracket)
if index >= 0:
# look for closing brackets while counting openings one
closing_brack_index = self._get_closing_bracket_index(index, a_str, location, lineno)
-
+
#print "closing bracket %d"%(closing_brack_index)
var = toparse[index:closing_brack_index + 1]
-
+
dummy = None
-
+
matched = self._SUBSGROUPRE.match(var)
-
+
if matched == None:
raise exceptions.SubstitutionError(lineno, location, \
"Cannot match a group[option] in %s "\
"but found an opening bracket (. Malformated expression " \
% (var))
else:
-
+
# recursive calls
group = self._replace_vars(matched.group('group'), location, - 1)
option = self._replace_vars(matched.group('option'), location, - 1)
-
+
try:
# if it is in ENVGROUP then check ENV variables with a Resource object
# if it is in CLIGROUP then check CLI argument with a Resource object
@@ -639,16 +641,16 @@ def _replace_vars(self, a_str, location, lineno= - 1):
dummy = res.get_value()
else:
dummy = self._sections[group][self.optionxform(option)]
- except KeyError, _: #IGNORE:W0612
+ except KeyError as _: #IGNORE:W0612
raise exceptions.SubstitutionError(lineno, location, "Property %s[%s] "\
"doesn't exist in this configuration file \n" \
% (group, option))
-
+
toparse = toparse.replace(var, dummy)
-
- return self._replace_vars(toparse, location, - 1)
- else:
- return toparse
+
+ return self._replace_vars(toparse, location, - 1)
+ else:
+ return toparse
def _get(self, section, conv, option, default, fail_if_missing):
@@ -659,19 +661,19 @@ def getint(self, section, option, default=0, fail_if_missing=False):
"""Return the int value of the option.
Default value is 0, None value can't be used as default value"""
return self._get(section, int, option, default, fail_if_missing)
-
+
def get_int(self, section, option, default=0, fail_if_missing=False):
"""Return the int value of the option.
Default value is 0, None value can't be used as default value"""
return self._get(section, int, option, default, fail_if_missing)
def getfloat(self, section, option, default=0, fail_if_missing=False):
- """Return the float value of the option.
+ """Return the float value of the option.
Default value is 0, None value can't be used as default value"""
return self._get(section, float, option, default, fail_if_missing)
-
+
def get_float(self, section, option, default=0, fail_if_missing=False):
- """Return the float value of the option.
+ """Return the float value of the option.
Default value is 0, None value can't be used as default value"""
return self._get(section, float, option, default, fail_if_missing)
@@ -679,31 +681,31 @@ def get_float(self, section, option, default=0, fail_if_missing=False):
'0': False, 'no': False, 'false': False, 'off': False}
def getboolean(self, section, option, default=False, fail_if_missing=False):
- """getboolean value"""
+ """getboolean value"""
val = self.get(section, option, default, fail_if_missing)
if val.lower() not in self._boolean_states:
- raise ValueError, 'Not a boolean: %s' % val
+ raise ValueError('Not a boolean: %s' % val)
return self._boolean_states[val.lower()]
-
+
def get_boolean(self, section, option, default=False, fail_if_missing=False):
"""get_boolean value"""
val = self.get(section, option, default, fail_if_missing)
if val.lower() not in self._boolean_states:
- raise ValueError, 'Not a boolean: %s' % val
+ raise ValueError('Not a boolean: %s' % val)
return self._boolean_states[val.lower()]
-
+
def get_list(self, section, option, default=None, fail_if_missing=False):
""" get a list of string, int """
-
+
val = self.get(section, option, default, fail_if_missing)
-
+
# parse it and return an error if invalid
try:
compiler = struct_parser.Compiler()
return compiler.compile_list(val)
- except struct_parser.CompilerError, err:
+ except struct_parser.CompilerError as err:
raise exceptions.Error(err.message)
-
+
def getlist(self, section, option, default=None, fail_if_missing=False):
""" Deprecated, use get_list instead"""
return self.get_list(section, option, default, fail_if_missing)
@@ -711,25 +713,25 @@ def getlist(self, section, option, default=None, fail_if_missing=False):
def getdict(self, section, option, default=None, fail_if_missing=False):
""" Deprecated, use get_dict instead"""
return self.get_dict(section, option, default, fail_if_missing)
-
-
+
+
def get_dict(self, section, option, default=None, fail_if_missing=False):
""" get a dict """
-
+
val = self.get(section, option, default, fail_if_missing)
-
+
# parse it and return an error if invalid
try:
compiler = struct_parser.Compiler()
return compiler.compile_dict(val)
- except struct_parser.CompilerError, err:
+ except struct_parser.CompilerError as err:
raise exceptions.Error(err.message)
-
+
@classmethod
def optionxform(cls, optionstr):
"""optionxform"""
return optionstr.lower()
-
+
#
# Regular expressions for parsing section headers and options.
#
@@ -746,22 +748,22 @@ def optionxform(cls, optionstr):
# by any # space/tab
r'(?P.*)$' # everything up to eol
)
-
+
def _read_include(self, lineno, line, origin, depth):
- """_read_include"""
- # Error if depth is MAX_INCLUDE_DEPTH
+ """_read_include"""
+ # Error if depth is MAX_INCLUDE_DEPTH
if depth >= Conf._MAX_INCLUDE_DEPTH:
raise exceptions.IncludeError("Error. Cannot do more than %d nested includes."\
" It is probably a mistake as you might have created a loop of includes" \
% (Conf._MAX_INCLUDE_DEPTH))
-
+
# remove %include from the path and we should have a path
i = line.find('%include')
-
+
#check if there is a < for including config files from a different format
#position after include
i = i + 8
-
+
# include file with a specific reading module
if line[i] == '<':
dummy = line[i+1:].strip()
@@ -773,7 +775,7 @@ def _read_include(self, lineno, line, origin, depth):
else:
group_name = None
the_format = dummy[:f_i].strip()
-
+
the_list = the_format.split(':')
if len(the_list) != 2 :
raise exceptions.IncludeError("Error. The mode and the group_name are not in the include line no %s: %s."\
@@ -783,21 +785,21 @@ def _read_include(self, lineno, line, origin, depth):
the_format, group_name = the_list
#strip the group name
group_name = group_name.strip()
-
+
path = dummy[f_i+1:].strip()
-
+
# replace variables if there are any
path = self._replace_vars(path, line, lineno)
-
+
raise exceptions.IncludeError("External Module reading not enabled in this ConfHelper")
#self._read_with_module(group_name, format, path, origin)
else:
- # normal include
- path = line[i:].strip()
-
+ # normal include
+ path = line[i:].strip()
+
# replace variables if there are any
path = self._replace_vars(path, line, lineno)
-
+
# check if file exits
if not os.path.exists(path):
raise exceptions.IncludeError("the config file to include %s does not exits" % (path), origin)
diff --git a/src/gmv/conf/conf_tests.py b/src/gmv/conf/conf_tests.py
index e932f231..8d89a874 100755
--- a/src/gmv/conf/conf_tests.py
+++ b/src/gmv/conf/conf_tests.py
@@ -19,6 +19,8 @@
'''
# unit tests part
+from __future__ import absolute_import, print_function
+
import unittest
import sys
import os
@@ -29,17 +31,17 @@
class TestConf(unittest.TestCase): #pylint: disable=R0904
"""
Test Class for the Conf Object
- """
+ """
@classmethod
def _get_tests_dir_path(cls):
""" get the org.ctbto.conf.tests path depending on where it is defined """
-
+
fmod_path = gmv.conf.__path__
-
+
test_dir = "%s/tests" % fmod_path[0]
-
+
return test_dir
-
+
def setUp(self): #pylint: disable=C0103
# necessary for the include with the VAR ENV substitution
os.environ["DIRCONFENV"] = TestConf._get_tests_dir_path()
@@ -54,96 +56,96 @@ def tearDown(self): #pylint: disable=C0103
if os.path.exists('/tmp/fake_conf.config'):
os.remove('/tmp/fake_conf.config')
-
+
def test_empty(self):
"""
Do nothing
"""
pass
-
+
def test_get_objects(self):
"""testGetObjects: test getter from all types """
# get simple string
astring = self.conf.get("GroupTest1", "astring")
-
+
self.assertEqual(astring,"oracle.jdbc.driver.OracleDriver")
-
+
# get an int
aint = self.conf.getint("GroupTest1", "aint")
-
+
self.assertEqual(aint, 10)
-
+
# get floatcompile the statements
afloat = self.conf.getfloat("GroupTest1", "afloat")
-
+
self.assertEqual(afloat, 5.24)
-
+
# get different booleans form
abool1 = self.conf.getboolean("GroupTest1", "abool1")
-
+
self.assertEqual(abool1, True)
-
+
abool2 = self.conf.getboolean("GroupTest1", "abool2")
-
+
self.assertEqual(abool2, False)
-
+
abool3 = self.conf.getboolean("GroupTest1", "abool3")
-
+
self.assertEqual(abool3, True)
-
+
abool4 = self.conf.getboolean("GroupTest1", "abool4")
-
+
self.assertEqual(abool4 , False)
-
+
def test_get_defaults(self):
"""testGetDefaults: test defaults values """
-
+
# get all defaults
astring = self.conf.get("GroupTest", "astring", "astring")
-
+
self.assertEqual(astring, "astring")
-
+
# get an default for int
aint = self.conf.getint("GroupTest", "aint", 2)
-
+
self.assertEqual(aint, 2)
-
+
# get float
afloat = self.conf.getfloat("GroupTest", "afloat", 10.541)
-
+
self.assertEqual(afloat, 10.541)
-
+
abool1 = self.conf.getboolean("GroupTest", "abool1", True)
-
+
self.assertEqual(abool1, True)
-
+
abool2 = self.conf.getboolean("GroupTest", "abool2", False)
-
+
self.assertEqual(abool2, False)
-
+
# existing group no option
abool5 = self.conf.getboolean("GroupTest1", "abool32", False)
-
+
self.assertEqual(abool5, False)
-
+
def test_var_substitutions(self):
"""testVarSubstitutions: test variables substitutions"""
-
+
# simple substitution
apath = self.conf.get("GroupTestVars", "path")
-
+
self.assertEqual(apath,"/foo/bar//tmp/foo/bar/bar/foo")
-
+
# multiple substitution
apath = self.conf.get("GroupTestVars", "path1")
-
+
self.assertEqual(apath,"/foo//tmp/foo/bar//foo/bar//tmp/foo/bar/bar/foo/bar")
-
+
# nested substitution
nested = self.conf.get("GroupTestVars", "nested")
-
- self.assertEqual(nested, "this is done")
-
+
+ self.assertEqual(nested, "this is done")
+
def test_include(self):
"""testInclude: test includes """
val = self.conf.get("IncludedGroup", "hello")
@@ -164,185 +166,185 @@ def ztest_use_conf_ENVNAME_resource(self): #pylint: disable=C0103
# need to setup the ENV containing the the path to the conf file:
os.environ[gmv.conf.conf_helper.Conf.ENVNAME] = "/tmp/fake_conf.config"
-
+
self.conf = gmv.conf.conf_helper.Conf.get_instance()
-
+
the_s = self.conf.get("MainDatabaseAccess", "driverClassName")
-
+
self.assertEqual(the_s, 'oracle.jdbc.driver.OracleDriver')
-
+
def test_read_from_CLI(self): #pylint: disable=C0103
"""testReadFromCLI: do substitutions from command line resources"""
#set environment
os.environ["TESTENV"] = "/tmp/foo/foo.bar"
-
+
val = self.conf.get("GroupTest1", "fromenv")
-
+
self.assertEqual(val, '/mydir//tmp/foo/foo.bar')
-
+
#set cli arg
sys.argv.append("--LongName")
sys.argv.append("My Cli Value")
-
+
val = self.conf.get("GroupTest1", "fromcli1")
-
+
self.assertEqual(val, 'My Cli Value is embedded')
-
+
#check with a more natural cli value
val = self.conf.get("GroupTest1", "fromcli2")
-
+
self.assertEqual(val, 'My Cli Value is embedded 2')
-
+
def test_read_from_ENV(self): #pylint: disable=C0103
"""testReadFromENV: do substitutions from ENV resources"""
#set environment
os.environ["TESTENV"] = "/tmp/foo/foo.bar"
-
+
val = self.conf.get("ENV", "TESTENV")
-
+
self.assertEqual(val, "/tmp/foo/foo.bar")
-
+
#set cli arg
sys.argv.append("--LongName")
sys.argv.append("My Cli Value")
-
+
val = self.conf.get("CLI", "LongName")
-
+
self.assertEqual(val, "My Cli Value")
-
+
# get a float from env
os.environ["TESTENV"] = "1.05"
-
+
val = self.conf.getfloat("ENV", "TESTENV")
-
- self.assertEqual(val+1, 2.05)
-
+
+ self.assertEqual(val+1, 2.05)
+
def test_print_content(self):
""" test print content """
-
+
#set environment
os.environ["TESTENV"] = "/tmp/foo/foo.bar"
-
+
#set cli arg
sys.argv.append("--LongName")
sys.argv.append("My Cli Value")
-
+
substitute_values = True
-
+
result = self.conf.print_content( substitute_values )
-
+
self.assertNotEqual(result, '')
-
+
def test_value_as_List(self): #pylint: disable=C0103
""" Value as List """
-
+
the_list = self.conf.getlist('GroupTestValueStruct', 'list')
-
+
self.assertEqual(the_list, ['a', 1, 3])
-
+
def test_value_as_unicodeList(self): #pylint: disable=C0103
""" Value as List """
-
+
the_list = self.conf.getlist('GroupTestValueStruct', 'unicode_list')
-
+
self.assertEqual(the_list, [ u'[Gmail]/Чаты', 'z' , 1 ])
-
+
def test_value_as_dict(self):
"""Dict as Value """
-
+
the_dict = self.conf.get_dict('GroupTestValueStruct', 'dict')
-
+
self.assertEqual(the_dict, {'a': 2, 'b': 3})
-
+
def test_complex_dict(self):
""" complex dict """
the_dict = self.conf.get_dict('GroupTestValueStruct', 'complex_dict')
-
+
self.assertEqual(the_dict, {'a': 2, 'c': {'a': 1, 'c': [1, 2, 3], 'b': [1, 2, 3, 4, 5, 6, 7]}, 'b': 3})
-
+
def test_dict_error(self):
""" error with a dict """
-
+
try:
self.conf.get_dict('GroupTestValueStruct', 'dict_error')
- except Exception, err:
+ except Exception as err:
self.assertEquals(err.message, "Expression \"{1:2,'v b': a\" cannot be converted as a dict.")
return
-
+
self.fail('Should never reach that point')
-
+
def test_list_error(self):
""" error with a list """
-
+
try:
the_list = self.conf.get_list('GroupTestValueStruct', 'list_error')
- print('the_list = %s\n' % (the_list))
- except Exception, err:
+ print(('the_list = %s\n' % (the_list)))
+ except Exception as err:
self.assertEquals(err.message, 'Unsupported token (type: @, value : OP) (line=1,col=3).')
return
-
+
self.fail('Should never reach that point')
-
+
class TestResource(unittest.TestCase): #pylint: disable=R0904
"""
Test Class for the Resource object
- """
+ """
def test_resource_simple_cli(self):
"""testResourceSimpleCli: read resource from CLI"""
# set command line
sys.argv.append("--LongName")
sys.argv.append("My Cli Value")
-
- resource = gmv.conf.conf_helper.Resource(a_cli_argument = "--LongName", a_env_variable = None)
-
+
+ resource = gmv.conf.conf_helper.Resource(a_cli_argument = "--LongName", a_env_variable = None)
+
self.assertEqual("My Cli Value", resource.get_value())
-
+
# look for LongName without --. It should be formalized by the Resource object
- resource = gmv.conf.conf_helper.Resource(a_cli_argument = "LongName", a_env_variable = None)
-
+ resource = gmv.conf.conf_helper.Resource(a_cli_argument = "LongName", a_env_variable = None)
+
self.assertEqual("My Cli Value", resource.get_value())
-
- def test_resource_from_env(self):
- """testResourceFromENV: read resource from ENV"""
- #ENV
+
+ def test_resource_from_env(self):
+ """testResourceFromENV: read resource from ENV"""
+ #ENV
os.environ["MYENVVAR"] = "My ENV Value"
-
+
resource = gmv.conf.conf_helper.Resource(a_cli_argument=None, a_env_variable="MYENVVAR")
-
+
self.assertEqual("My ENV Value", resource.get_value())
-
+
def ztest_resource_priority_rules(self):
- """testResourcePriorityRules: test priority rules"""
+ """testResourcePriorityRules: test priority rules"""
resource = gmv.conf.conf_helper.Resource(a_cli_argument="--LongName", a_env_variable="MYENVVAR")
-
+
self.assertEqual("My Cli Value", resource.get_value())
-
+
def test_resource_get_different_types(self): #pylint: disable=C0103
"""testResourceGetDifferentTypes: return resource in different types"""
-
+
os.environ["MYENVVAR"] = "yes"
resource = gmv.conf.conf_helper.Resource(a_cli_argument=None, a_env_variable="MYENVVAR")
-
+
self.assertEqual(resource.get_value_as_boolean(), True)
-
+
os.environ["MYENVVAR"] = "4"
-
+
resource = gmv.conf.conf_helper.Resource(a_cli_argument=None, a_env_variable="MYENVVAR")
-
+
self.assertEqual(resource.get_value_as_int()+1, 5)
-
+
os.environ["MYENVVAR"] = "4.345"
-
+
resource = gmv.conf.conf_helper.Resource(a_cli_argument=None, a_env_variable="MYENVVAR")
-
+
self.assertEqual(resource.get_value_as_float()+1, 5.345)
-
+
def tests():
""" global test method"""
#suite = unittest.TestLoader().loadTestsFromModule(gmv.conf.conf_tests)
suite = unittest.TestLoader().loadTestsFromTestCase(TestConf)
unittest.TextTestRunner(verbosity=2).run(suite)
-
-
+
+
if __name__ == '__main__':
tests()
diff --git a/src/gmv/conf/utils/struct_parser.py b/src/gmv/conf/utils/struct_parser.py
index 6b174147..13883231 100755
--- a/src/gmv/conf/utils/struct_parser.py
+++ b/src/gmv/conf/utils/struct_parser.py
@@ -16,37 +16,40 @@
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
+from __future__ import absolute_import, print_function
+
+import io
import tokenize
import token
-import StringIO
+import six
class TokenizerError(Exception):
"""Base class for All exceptions"""
def __init__(self, a_msg, a_line=None, a_col=None):
-
+
self._line = a_line
self._col = a_col
-
+
if self._line == None and self._col == None:
- extra = ""
+ extra = ""
else:
extra = "(line=%s,col=%s)" % (self._line, self._col)
-
+
super(TokenizerError, self).__init__("%s %s." % (a_msg, extra))
-
+
class Token(object):
""" Token class """
def __init__(self, a_type, num, value, begin, end, parsed_line):
-
+
self._type = a_type
self._num = num
self._value = value
self._begin = begin
self._end = end
self._parsed_line = parsed_line
-
+
@property
def type(self):
""" Return the token type """
@@ -61,433 +64,433 @@ def num(self):
def value(self):
""" Return the token value """
return self._value
-
+
@property
def begin(self):
""" Return the token begin """
return self._begin
-
+
@property
def end(self):
""" Return the token end """
return self._end
-
+
@property
def parsed_line(self):
""" Return the token line """
return self._parsed_line
-
+
def __repr__(self):
return "[type,num]=[%s,%s],value=[%s], parsed line=%s,[begin index,end index]=[%s,%s]" \
% (self._type, self._num, self._value, self._parsed_line, self._begin, self._end)
-
+
class Tokenizer(object):
- """
- Create tokens for parsing the grammar.
+ """
+ Create tokens for parsing the grammar.
This class is a wrapper around the python tokenizer adapt to the DSL that is going to be used.
- """
+ """
def __init__(self):
""" constructor """
# list of tokens
self._tokens = []
-
+
self._index = 0
-
+
self._current = None
-
+
def tokenize(self, a_program, a_eatable_token_types = ()):
""" parse the expression.
By default the parser eats space but some extra tokens by have to be eaten
-
+
Args:
a_expression: the expression to parser
-
+
Returns:
return dict containing the different parts of the request (spectrum, ....)
-
+
Raises:
exception TokenizerError if the syntax of the aString string is incorrect
"""
- g_info = tokenize.generate_tokens(StringIO.StringIO(a_program).readline) # tokenize the string
-
+        g_info = tokenize.generate_tokens(six.StringIO(a_program).readline) # tokenize the string
+
for toknum, tokval, tokbeg, tokend, tokline in g_info:
if token.tok_name[toknum] not in a_eatable_token_types:
self._tokens.append(Token(token.tok_name[toknum], toknum, tokval, tokbeg, tokend, tokline))
-
-
-
+
+
+
def __iter__(self):
""" iterator implemented with a generator.
"""
for tok in self._tokens:
self._current = tok
yield tok
-
+
def next(self):
""" get next token.
-
+
Returns:
return next token
"""
-
+
self._current = self._tokens[self._index]
self._index += 1
return self._current
-
+    __next__ = next  # Py2/3 compat: on Py3 the next() builtin dispatches to __next__
def has_next(self):
""" check it there are more tokens to consume.
-
+
Returns:
return True if more tokens to consume False otherwise
"""
return self._index < len(self._tokens)
-
+
def current_token(self):
""" return the latest consumed token.
-
+
Returns:
return the latest consumerd token
"""
return self._current
-
+
def consume_token(self, what):
""" consume the next token if it is what """
if self._current.value != what :
raise TokenizerError("Expected '%s' but instead found '%s'" % (what, self._current.value))
else:
- return self.next()
-
+ return next(self)
+
def consume_while_next_token_is_in(self, a_token_types_list):
"""
Consume the next tokens as long as they have one of the passed types.
This means that at least one token with one of the passed types needs to be matched.
-
+
Args:
a_token_types_list: the token types to consume
-
+
Returns:
- return the next non matching token
+ return the next non matching token
"""
-
+
self.consume_next_tokens(a_token_types_list)
-
+
while True:
-
- tok = self.next()
-
+
+ tok = next(self)
+
if tok.type not in a_token_types_list:
return tok
-
+
def consume_while_current_token_is_in(self, a_token_types_list): #pylint: disable=C0103
"""
Consume the tokens starting from the current token as long as they have one of the passed types.
It is a classical token eater. It eats tokens as long as they are the specified type
-
+
Args:
a_token_types_list: the token types to consume
-
+
Returns:
- return the next non matching token
+ return the next non matching token
"""
-
+
tok = self.current_token()
-
+
while tok.type in a_token_types_list:
- tok = self.next()
-
+ tok = next(self)
+
return tok
-
+
def consume_next_tokens(self, a_token_types_list):
"""
Consume one of the next token types given in the list and check that it is the expected type otherwise send an exception
-
+
Args:
- a_tokens_list: the token types to list
-
+ a_tokens_list: the token types to list
+
Returns:
- return next token
-
+ return next token
+
Raises:
exception BadTokenError if a Token Type that is not in a_token_types_list is found
"""
-
- tok = self.next()
-
+
+ tok = next(self)
+
if tok.type not in a_token_types_list:
raise TokenizerError("Expected '%s' but instead found '%s'" % (a_token_types_list, tok))
else:
return tok
-
+
def advance(self, inc=1):
""" return the next + inc token but do not consume it.
Useful to check future tokens.
-
+
Args:
a_expression: increment + 1 is the default (just look one step forward)
-
+
Returns:
return lookhead token
"""
return self._tokens[self._index-1 + inc]
-
+
class CompilerError(Exception):
"""Base class for All exceptions"""
def __init__(self, a_msg, a_line=None, a_col=None):
-
+
self._line = a_line
self._col = a_col
-
+
msg = ''
-
+
if self._line == None and self._col == None:
extra = ""
- msg = "%s." % (a_msg)
+ msg = "%s." % (a_msg)
else:
extra = "(line=%s,col=%s)" % (self._line, self._col)
msg = "%s %s." % (a_msg, extra)
-
+
super(CompilerError, self).__init__(msg)
-
+
class Compiler(object):
""" compile some python structures
"""
-
+
def __init__(self):
""" constructor """
-
+
#default tokens to ignore
self._tokens_to_ignore = ('INDENT', 'DEDENT', 'NEWLINE', 'NL')
-
+
def compile_list(self, a_to_compile_str):
""" compile a list object """
-
+
try:
tokenizer = Tokenizer()
tokenizer.tokenize(a_to_compile_str, self._tokens_to_ignore)
- except tokenize.TokenError, err:
-
- #translate this error into something understandable.
+ except tokenize.TokenError as err:
+
+ #translate this error into something understandable.
#It is because the bloody tokenizer counts the brackets
if err.args[0] == "EOF in multi-line statement":
raise CompilerError("Expression \"%s\" cannot be converted as a list" % (a_to_compile_str))
else:
raise CompilerError(err)
-
- print("Err = %s\n" % (err))
-
- tokenizer.next()
-
+
+ print(("Err = %s\n" % (err)))
+
+ next(tokenizer)
+
return self._compile_list(tokenizer)
-
+
def compile_dict(self, a_to_compile_str):
""" compile a dict object """
-
+
try:
tokenizer = Tokenizer()
tokenizer.tokenize(a_to_compile_str, self._tokens_to_ignore)
- except tokenize.TokenError, err:
-
- #translate this error into something understandable.
+ except tokenize.TokenError as err:
+
+ #translate this error into something understandable.
#It is because the bloody tokenizer counts the brackets
if err.args[0] == "EOF in multi-line statement":
raise CompilerError("Expression \"%s\" cannot be converted as a dict" % (a_to_compile_str))
else:
raise CompilerError(err)
-
- print("Err = %s\n" % (err))
-
- tokenizer.next()
-
+
+ print(("Err = %s\n" % (err)))
+
+ next(tokenizer)
+
return self._compile_dict(tokenizer)
def _compile_dict(self, a_tokenizer):
""" internal method for compiling a dict struct """
result = {}
-
+
the_token = a_tokenizer.current_token()
-
+
while the_token.type != 'ENDMARKER':
-
+
#look for an open bracket
if the_token.type == 'OP' and the_token.value == '{':
-
- the_token = a_tokenizer.next()
-
+
+ the_token = next(a_tokenizer)
+
while True:
-
+
if the_token.type == 'OP' and the_token.value == '}':
return result
else:
# get key values
(key, val) = self._compile_key_value(a_tokenizer)
- result[key] = val
-
+ result[key] = val
+
the_token = a_tokenizer.current_token()
-
+
else:
raise CompilerError("Unsupported token (type: %s, value : %s)" \
% (the_token.type, the_token.value), the_token.begin[0], the_token.begin[1])
-
+
#we should never reach that point (compilation error)
raise CompilerError("End of line reached without finding a list. The line [%s] cannot be transformed as a list" \
% (the_token.parsed_line))
-
+
def _compile_key_value(self, a_tokenizer):
""" look for the pair key value component of a dict """
-
+
the_token = a_tokenizer.current_token()
-
+
key = None
val = None
-
+
# get key
if the_token.type in ('STRING', 'NUMBER', 'NAME'):
-
+
#next the_token is in _compile_litteral
key = self._compile_litteral(a_tokenizer)
-
+
the_token = a_tokenizer.current_token()
-
+
else:
raise CompilerError("unexpected token (type: %s, value : %s)" \
% (the_token.type, the_token.value), \
- the_token.begin[0], the_token.begin[1])
-
+ the_token.begin[0], the_token.begin[1])
+
#should have a comma now
if the_token.type != 'OP' and the_token.value != ':':
raise CompilerError("Expected a token (type:OP, value: :) but instead got (type: %s, value: %s)" \
% (the_token.type, the_token.value), the_token.begin[0], the_token.begin[1])
else:
#eat it
- the_token = a_tokenizer.next()
-
+ the_token = next(a_tokenizer)
+
#get value
# it can be a
if the_token.type in ('STRING', 'NUMBER', 'NAME'):
#next the_token is in _compile_litteral
val = self._compile_litteral(a_tokenizer)
-
+
the_token = a_tokenizer.current_token()
-
+
#check for a list
elif the_token.value == '[' and the_token.type == 'OP':
-
+
# look for a list
val = self._compile_list(a_tokenizer)
-
+
# positioning to the next token
- the_token = a_tokenizer.next()
-
+ the_token = next(a_tokenizer)
+
elif the_token.value == '{' and the_token.type == 'OP':
-
+
# look for a dict
val = self._compile_dict(a_tokenizer)
-
+
# positioning to the next token
- the_token = a_tokenizer.next()
-
+ the_token = next(a_tokenizer)
+
elif the_token.value == '(' and the_token.type == 'OP':
-
+
# look for a dict
val = self._compile_tuple(a_tokenizer)
-
+
# positioning to the next token
- the_token = a_tokenizer.next()
-
+ the_token = next(a_tokenizer)
+
else:
raise CompilerError("unexpected token (type: %s, value : %s)" \
% (the_token.type, the_token.value), the_token.begin[0], \
- the_token.begin[1])
-
+ the_token.begin[1])
+
#if we have a comma then eat it as it means that we will have more than one values
if the_token.type == 'OP' and the_token.value == ',':
- the_token = a_tokenizer.next()
-
- return (key, val)
-
-
+ the_token = next(a_tokenizer)
+
+ return (key, val)
+
+
def _compile_litteral(self, a_tokenizer):
""" compile key. A key can be a NAME, STRING or NUMBER """
-
+
val = None
-
+
dummy = None
-
+
the_token = a_tokenizer.current_token()
-
+
while the_token.type not in ('OP', 'ENDMARKER'):
- if the_token.type == 'STRING':
+ if the_token.type == 'STRING':
#check if the string is unicode
if len(the_token.value) >= 3 and the_token.value[:2] == "u'":
#unicode string
#dummy = unicode(the_token.value[2:-1], 'utf_8') #decode from utf-8 encoding not necessary if read full utf-8 file
- dummy = unicode(the_token.value[2:-1])
+ dummy = six.text_type(the_token.value[2:-1])
else:
#ascii string
# the value contains the quote or double quotes so remove them always
dummy = the_token.value[1:-1]
-
+
elif the_token.type == 'NAME':
# intepret all non quoted names as a string
dummy = the_token.value
-
- elif the_token.type == 'NUMBER':
-
+
+ elif the_token.type == 'NUMBER':
+
dummy = self._create_number(the_token.value)
-
+
else:
raise CompilerError("unexpected token (type: %s, value : %s)" \
% (the_token.type, the_token.value), \
the_token.begin[0], the_token.begin[1])
-
+
#if val is not None, it has to be a string
if val:
val = '%s %s' % (str(val), str(dummy))
else:
val = dummy
-
- the_token = a_tokenizer.next()
-
+
+ the_token = next(a_tokenizer)
+
return val
-
-
+
+
def _compile_tuple(self, a_tokenizer):
""" process tuple structure """
result = []
-
+
open_bracket = 0
# this is the mode without [ & ] operator : 1,2,3,4
simple_list_mode = 0
-
+
the_token = a_tokenizer.current_token()
-
+
while the_token.type != 'ENDMARKER':
#look for an open bracket
if the_token.value == '(' and the_token.type == 'OP':
- #first time we open a bracket and not in simple mode
+ #first time we open a bracket and not in simple mode
if open_bracket == 0 and simple_list_mode == 0:
open_bracket += 1
#recurse to create the imbricated list
else:
result.append(self._compile_tuple(a_tokenizer))
-
- the_token = a_tokenizer.next()
-
+
+ the_token = next(a_tokenizer)
+
elif the_token.value == '{' and the_token.type == 'OP':
-
+
result.append(self._compile_dict(a_tokenizer))
-
- the_token = a_tokenizer.next()
-
+
+ the_token = next(a_tokenizer)
+
elif the_token.value == '[' and the_token.type == 'OP':
-
+
result.append(self._compile_list(a_tokenizer))
-
- the_token = a_tokenizer.next()
-
+
+ the_token = next(a_tokenizer)
+
elif the_token.type == 'OP' and the_token.value == ')':
# end of list return result
if open_bracket == 1:
@@ -500,69 +503,69 @@ def _compile_tuple(self, a_tokenizer):
# the comma case
elif the_token.type == 'OP' and the_token.value == ',':
# just eat it
- the_token = a_tokenizer.next()
-
+ the_token = next(a_tokenizer)
+
elif the_token.type in ('STRING', 'NUMBER', 'NAME'):
-
- # find values outside of a list
+
+ # find values outside of a list
# this can be okay
if open_bracket == 0:
simple_list_mode = 1
-
+
#next the_token is in _compile_litteral
result.append(self._compile_litteral(a_tokenizer))
-
+
the_token = a_tokenizer.current_token()
-
+
else:
raise CompilerError("Unsupported token (type: %s, value : %s)"\
% (the_token.value, the_token.type), \
the_token.begin[0], the_token.begin[1])
-
-
+
+
# if we are in simple_list_mode return list else error
if simple_list_mode == 1:
return tuple(result)
-
+
#we should never reach that point (compilation error)
raise CompilerError("End of line reached without finding a list. The line [%s] cannot be transformed as a tuple" \
% (the_token.parsed_line))
-
+
def _compile_list(self, a_tokenizer):
""" process a list structure """
result = []
-
-
+
+
open_bracket = 0
# this is the mode without [ & ] operator : 1,2,3,4
simple_list_mode = 0
-
+
the_token = a_tokenizer.current_token()
-
+
while the_token.type != 'ENDMARKER':
#look for an open bracket
if the_token.value == '[' and the_token.type == 'OP':
- #first time we open a bracket and not in simple mode
+ #first time we open a bracket and not in simple mode
if open_bracket == 0 and simple_list_mode == 0:
open_bracket += 1
#recurse to create the imbricated list
else:
result.append(self._compile_list(a_tokenizer))
-
- the_token = a_tokenizer.next()
-
+
+ the_token = next(a_tokenizer)
+
elif the_token.value == '(' and the_token.type == 'OP':
-
+
result.append(self._compile_tuple(a_tokenizer))
-
- the_token = a_tokenizer.next()
-
+
+ the_token = next(a_tokenizer)
+
elif the_token.value == '{' and the_token.type == 'OP':
-
+
result.append(self._compile_dict(a_tokenizer))
-
- the_token = a_tokenizer.next()
-
+
+ the_token = next(a_tokenizer)
+
elif the_token.type == 'OP' and the_token.value == ']':
# end of list return result
if open_bracket == 1:
@@ -574,39 +577,39 @@ def _compile_list(self, a_tokenizer):
# the comma case
elif the_token.type == 'OP' and the_token.value == ',':
# just eat it
- the_token = a_tokenizer.next()
-
+ the_token = next(a_tokenizer)
+
elif the_token.type in ('STRING', 'NUMBER', 'NAME'):
-
- # find values outside of a list
+
+ # find values outside of a list
# this can be okay
if open_bracket == 0:
simple_list_mode = 1
-
+
#next the_token is in _compile_litteral
result.append(self._compile_litteral(a_tokenizer))
-
+
the_token = a_tokenizer.current_token()
-
+
else:
raise CompilerError("Unsupported token (type: %s, value : %s)"\
% (the_token.value, the_token.type), \
the_token.begin[0], the_token.begin[1])
-
-
+
+
# if we are in simple_list_mode return list else error
if simple_list_mode == 1:
return result
-
+
#we should never reach that point (compilation error)
raise CompilerError("End of line reached without finding a list. The line [%s] cannot be transformed as a list" \
% (the_token.parsed_line))
-
+
@classmethod
def _create_number(cls, a_number):
- """ depending on the value return a int or a float.
+ """ depending on the value return a int or a float.
For the moment very simple: If there is . it is a float"""
-
+
if a_number.find('.') > 0:
return float(a_number)
else:
diff --git a/src/gmv/conf/utils/struct_parser_tests.py b/src/gmv/conf/utils/struct_parser_tests.py
index 14144af7..5fbdece2 100755
--- a/src/gmv/conf/utils/struct_parser_tests.py
+++ b/src/gmv/conf/utils/struct_parser_tests.py
@@ -18,6 +18,7 @@
'''
# unit tests part
+from __future__ import absolute_import
import unittest
from gmv.conf.utils.struct_parser import Compiler, CompilerError
@@ -89,7 +90,7 @@ def test_list_error(self):
try:
compiler.compile_list(the_string)
- except CompilerError, err:
+ except CompilerError as err:
self.assertEqual(err.message, 'Expression " a ]" cannot be converted as a list.')
def test_list_unicode_val(self):
@@ -126,7 +127,7 @@ def test_list_error_2(self):
try:
compiler.compile_list(the_string)
- except CompilerError, err:
+ except CompilerError as err:
self.assertEqual(err.message, 'Unsupported token (type: @, value : OP) (line=1,col=3).')
def test_simple_dict(self):
@@ -148,7 +149,7 @@ def test_dict_error(self):
try:
compiler.compile_dict(the_string)
- except CompilerError, err:
+ except CompilerError as err:
self.assertEqual(err.message, 'Expression "{\'a\':1, b:2 " cannot be converted as a dict.')
def test_dict_with_list(self):
diff --git a/src/gmv/credential_utils.py b/src/gmv/credential_utils.py
index dc08c7ed..9e543733 100755
--- a/src/gmv/credential_utils.py
+++ b/src/gmv/credential_utils.py
@@ -20,18 +20,18 @@
and xauth part of gyb http://code.google.com/p/got-your-back/source/browse/trunk/gyb.py
'''
+from __future__ import absolute_import
import webbrowser
import json
import base64
-import urllib #for urlencode
-import urllib2
import os
import getpass
-import gmv.log_utils as log_utils
-import gmv.blowfish as blowfish
-import gmv.gmvault_utils as gmvault_utils
+from . import log_utils
+from . import blowfish
+from . import gmvault_utils
+from six.moves import input, urllib
LOG = log_utils.LoggerFactory.get_logger('credential_utils')
@@ -62,7 +62,7 @@ class CredentialHelper(object):
Helper handling all credentials
"""
SECRET_FILEPATH = '%s/token.sec'
-
+
@classmethod
def get_secret_key(cls, a_filepath):
"""
@@ -74,7 +74,7 @@ def get_secret_key(cls, a_filepath):
else:
secret = gmvault_utils.make_password()
- fdesc = os.open(a_filepath, os.O_CREAT|os.O_WRONLY, 0600)
+ fdesc = os.open(a_filepath, os.O_CREAT|os.O_WRONLY, 0o600)
try:
the_bytes = os.write(fdesc, secret)
finally:
@@ -84,24 +84,24 @@ def get_secret_key(cls, a_filepath):
raise Exception("Error: Cannot write secret in %s" % a_filepath)
return secret
-
+
@classmethod
def store_passwd(cls, email, passwd):
"""
Encrypt and store gmail password
"""
passwd_file = '%s/%s.passwd' % (gmvault_utils.get_home_dir_path(), email)
-
- fdesc = os.open(passwd_file, os.O_CREAT|os.O_WRONLY, 0600)
-
+
+ fdesc = os.open(passwd_file, os.O_CREAT|os.O_WRONLY, 0o600)
+
cipher = blowfish.Blowfish(cls.get_secret_key(cls.SECRET_FILEPATH % (gmvault_utils.get_home_dir_path())))
cipher.initCTR()
-
+
encrypted = cipher.encryptCTR(passwd)
the_bytes = os.write(fdesc, encrypted)
-
+
os.close(fdesc)
-
+
if the_bytes < len(encrypted):
raise Exception("Error: Cannot write password in %s" % (passwd_file))
@@ -153,7 +153,7 @@ def read_oauth2_tok_sec(cls, email):
try:
with open(user_oauth_file_path) as oauth_file:
oauth_result = json.load(oauth_file)
- except Exception, _: #pylint: disable-msg=W0703
+ except Exception as _: #pylint: disable-msg=W0703
LOG.critical("Cannot read oauth credentials from %s. Force oauth credentials renewal." % user_oauth_file_path)
LOG.critical("=== Exception traceback ===")
LOG.critical(gmvault_utils.get_exception_traceback())
@@ -197,24 +197,24 @@ def get_credential(cls, args, test_mode={'activate': False, 'value': 'test_passw
#first check that there is an email
if not args.get('email', None):
raise Exception("No email passed, Need to pass an email")
-
- if args['passwd'] in ['empty', 'store', 'renew']:
- # --passwd is here so look if there is a passwd in conf file
+
+ if args['passwd'] in ['empty', 'store', 'renew']:
+ # --passwd is here so look if there is a passwd in conf file
# or go in interactive mode
-
+
LOG.critical("Authentication performed with Gmail password.\n")
-
+
passwd = cls.read_password(args['email'])
-
+
#password to be renewed so need an interactive phase to get the new pass
if not passwd or args['passwd'] in ['renew', 'store']: # go to interactive mode
if not test_mode.get('activate', False):
passwd = getpass.getpass('Please enter gmail password for %s and press ENTER:' % (args['email']))
else:
passwd = test_mode.get('value', 'no_password_given')
-
+
credential = { 'type' : 'passwd', 'value' : passwd}
-
+
#store it in dir if asked for --store-passwd or --renew-passwd
if args['passwd'] in ['renew', 'store']:
LOG.critical("Store password for %s in $HOME/.gmvault." % (args['email']))
@@ -223,7 +223,7 @@ def get_credential(cls, args, test_mode={'activate': False, 'value': 'test_passw
else:
LOG.critical("Use password stored in $HOME/.gmvault dir (Storing your password here is not recommended).")
credential = { 'type' : 'passwd', 'value' : passwd, 'option':'read' }
-
+
# use oauth2
elif args['passwd'] in ('not_seen', None) and args['oauth2'] in (None, 'empty', 'renew', 'not_seen'):
# get access token and refresh token
@@ -261,8 +261,8 @@ def _get_oauth2_acc_tok_from_ref_tok(cls, refresh_token):
request_url = '%s/%s' % (account_base_url, 'o/oauth2/token')
try:
- response = urllib2.urlopen(request_url, urllib.urlencode(params)).read()
- except Exception, err: #pylint: disable-msg=W0703
+            response = urllib.request.urlopen(request_url, urllib.parse.urlencode(params).encode('utf-8')).read()
+ except Exception as err: #pylint: disable-msg=W0703
LOG.critical("Error: Problems when trying to connect to Google oauth2 endpoint: %s.\n" % (request_url))
raise err
@@ -300,8 +300,8 @@ def _get_authorization_tokens(cls, authorization_code):
request_url = '%s/%s' % (account_base_url, 'o/oauth2/token')
try:
- response = urllib2.urlopen(request_url, urllib.urlencode(params)).read()
- except Exception, err: #pylint: disable-msg=W0703
+            response = urllib.request.urlopen(request_url, urllib.parse.urlencode(params).encode('utf-8')).read()
+ except Exception as err: #pylint: disable-msg=W0703
LOG.critical("Error: Problems when trying to connect to Google oauth2 endpoint: %s." % (request_url))
raise err
@@ -317,7 +317,7 @@ def _get_oauth2_tokens(cls, email, use_webbrowser = False, debug=False):
permission_url = generate_permission_url()
#message to indicate that a browser will be opened
- raw_input('gmvault will now open a web browser page in order for you to grant gmvault access to your Gmail.\n'\
+ input('gmvault will now open a web browser page in order for you to grant gmvault access to your Gmail.\n'\
'Please make sure you\'re logged into the correct Gmail account (%s) before granting access.\n'\
'Press ENTER to open the browser.' % (email))
@@ -325,17 +325,17 @@ def _get_oauth2_tokens(cls, email, use_webbrowser = False, debug=False):
if use_webbrowser:
try:
webbrowser.open(str(permission_url))
- except Exception, err: #pylint: disable-msg=W0703
+ except Exception as err: #pylint: disable-msg=W0703
LOG.critical("Error: %s.\n" % (err) )
LOG.critical("=== Exception traceback ===")
LOG.critical(gmvault_utils.get_exception_traceback())
LOG.critical("=== End of Exception traceback ===\n")
- verification_code = raw_input("You should now see the web page on your browser now.\n"\
+ verification_code = input("You should now see the web page on your browser now.\n"\
"If you don\'t, you can manually open:\n\n%s\n\nOnce you've granted"\
" gmvault access, enter the verification code and press enter:\n" % (permission_url))
else:
- verification_code = raw_input('Please log in and/or grant access via your browser at %s '
+ verification_code = input('Please log in and/or grant access via your browser at %s '
'then enter the verification code and press enter:' % (permission_url))
#request access and refresh token with the obtained verification code
diff --git a/src/gmv/gmv_cmd.py b/src/gmv/gmv_cmd.py
index f245aada..ec96360a 100755
--- a/src/gmv/gmv_cmd.py
+++ b/src/gmv/gmv_cmd.py
@@ -17,6 +17,8 @@
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
+from __future__ import absolute_import, print_function
+
import socket
import sys
import datetime
@@ -26,14 +28,14 @@
import argparse
import imaplib
-import gmv.log_utils as log_utils
-import gmv.gmvault_utils as gmvault_utils
-import gmv.gmvault as gmvault
-import gmv.gmvault_export as gmvault_export
-import gmv.collections_utils as collections_utils
-from gmv.cmdline_utils import CmdLineParser
-from gmv.credential_utils import CredentialHelper
+from . import log_utils
+from . import gmvault_utils
+from . import gmvault
+from . import gmvault_export
+from . import collections_utils
+from .cmdline_utils import CmdLineParser
+from .credential_utils import CredentialHelper
GMVAULT_VERSION = gmvault_utils.GMVAULT_VERSION
@@ -50,11 +52,11 @@
REST_HELP_EPILOGUE = """Examples:
-a) Complete restore of your gmail account (backed up in ~/gmvault-db) into anewfoo.bar@gmail.com
+a) Complete restore of your gmail account (backed up in ~/gmvault-db) into anewfoo.bar@gmail.com
#> gmvault restore -d ~/gmvault-db anewfoo.bar@gmail.com
-b) Quick restore (restore only the last 2 months to make regular updates) of your gmail account into anewfoo.bar@gmail.com
+b) Quick restore (restore only the last 2 months to make regular updates) of your gmail account into anewfoo.bar@gmail.com
#> gmvault restore --type quick -d ~/gmvault-db foo.bar@gmail.com
@@ -133,7 +135,7 @@ class GMVaultLauncher(object):
"""
GMVault launcher handling the command parsing
"""
-
+
SYNC_TYPES = ['full', 'quick', 'custom']
RESTORE_TYPES = ['full', 'quick']
CHECK_TYPES = ['full']
@@ -143,9 +145,9 @@ class GMVaultLauncher(object):
('maildir', gmvault_export.OfflineIMAP),
('mbox', gmvault_export.MBox)])
EXPORT_TYPE_NAMES = ", ".join(EXPORT_TYPES)
-
+
DEFAULT_GMVAULT_DB = "%s/gmvault-db" % (os.getenv("HOME", "."))
-
+
def __init__(self):
""" constructor """
super(GMVaultLauncher, self).__init__()
@@ -157,13 +159,13 @@ def _create_parser(self): #pylint: disable=R0915
Return the created parser
"""
parser = CmdLineParser()
-
+
parser.epilogue = GLOBAL_HELP_EPILOGUE
parser.add_argument("-v", '--version', action='version', version='Gmvault v%s' % (GMVAULT_VERSION))
-
+
subparsers = parser.add_subparsers(title='subcommands', help='valid subcommands.')
-
+
# A sync command
sync_parser = subparsers.add_parser('sync', \
help='synchronize with a given gmail account.')
@@ -174,11 +176,11 @@ def _create_parser(self): #pylint: disable=R0915
sync_parser.add_argument('-t', '-type', '--type', \
action='store', dest='type', \
default='full', help='type of synchronisation: full|quick|custom. (default: full)')
-
+
sync_parser.add_argument("-d", "--db-dir", \
action='store', help="Database root directory. (default: $HOME/gmvault-db)",\
dest="db_dir", default= self.DEFAULT_GMVAULT_DB)
-
+
# for both when seen add const empty otherwise not_seen
# this allow to distinguish between an empty value and a non seen option
sync_parser.add_argument("-y", "--oauth2", \
@@ -196,11 +198,11 @@ def _create_parser(self): #pylint: disable=R0915
sync_parser.add_argument("--renew-passwd", \
help="renew the stored password via an interactive authentication session. (not recommended)",
action= 'store_const' , dest="passwd", const='renew')
-
+
sync_parser.add_argument("--store-passwd", \
help="use interactive password authentication, encrypt and store the password. (not recommended)",
action= 'store_const' , dest="passwd", const='store')
-
+
#sync_parser.add_argument("-r", "--imap-req", type = get_unicode_commandline_arg, metavar = "REQ", \
# help="Imap request to restrict sync.",\
# dest="imap_request", default=None)
@@ -208,98 +210,98 @@ def _create_parser(self): #pylint: disable=R0915
sync_parser.add_argument("-r", "--imap-req", metavar = "REQ", \
help="Imap request to restrict sync.",\
dest="imap_request", default=None)
-
+
sync_parser.add_argument("-g", "--gmail-req", metavar = "REQ", \
help="Gmail search request to restrict sync as defined in"\
"https://support.google.com/mail/bin/answer.py?hl=en&answer=7190",\
dest="gmail_request", default=None)
-
+
# activate the resume mode --restart is deprecated
sync_parser.add_argument("--resume", "--restart", \
action='store_true', dest='restart', \
default=False, help= 'Resume the sync action from the last saved gmail id.')
-
+
# activate the resume mode --restart is deprecated
sync_parser.add_argument("--emails-only", \
action='store_true', dest='only_emails', \
default=False, help= 'Only sync emails.')
-
+
# activate the resume mode --restart is deprecated
sync_parser.add_argument("--chats-only", \
action='store_true', dest='only_chats', \
default=False, help= 'Only sync chats.')
-
+
sync_parser.add_argument("-e", "--encrypt", \
help="encrypt stored email messages in the database.",\
action='store_true',dest="encrypt", default=False)
-
+
sync_parser.add_argument("-c", "--check-db", metavar = "VAL", \
help="enable/disable the removal from the gmvault db of the emails "\
"that have been deleted from the given gmail account. VAL = yes or no.",\
dest="db_cleaning", default=None)
-
+
sync_parser.add_argument("-m", "--multiple-db-owner", \
help="Allow the email database to be synchronized with emails from multiple accounts.",\
action='store_true',dest="allow_mult_owners", default=False)
-
+
# activate the restart mode
sync_parser.add_argument("--no-compression", \
action='store_false', dest='compression', \
default=True, help= 'disable email storage compression (gzip).')
-
+
sync_parser.add_argument("--server", metavar = "HOSTNAME", \
action='store', help="Gmail imap server hostname. (default: imap.gmail.com)",\
dest="host", default="imap.gmail.com")
-
+
sync_parser.add_argument("--port", metavar = "PORT", \
action='store', help="Gmail imap server port. (default: 993)",\
dest="port", default=993)
-
+
sync_parser.add_argument("--debug", "-debug", \
action='store_true', help="Activate debugging info",\
dest="debug", default=False)
-
-
+
+
sync_parser.set_defaults(verb='sync')
-
+
sync_parser.epilogue = SYNC_HELP_EPILOGUE
-
+
# restore command
rest_parser = subparsers.add_parser('restore', \
help='restore gmvault-db to a given email account.')
#email argument can be optional so it should be an option
rest_parser.add_argument('email', \
action='store', default='empty_$_email', help='email account to restore.')
-
+
# restore typ
rest_parser.add_argument('-t', '-type', '--type', \
action='store', dest='type', \
default='full', help='type of restoration: full|quick. (default: full)')
-
+
# add a label
rest_parser.add_argument('-a', '--apply-label' , \
action='store', dest='apply_label', \
default=None, help='Apply a label to restored emails')
-
+
# activate the resume mode --restart is deprecated
rest_parser.add_argument("--resume", "--restart", \
action='store_true', dest='restart', \
default=False, help= 'Restart from the last saved gmail id.')
-
+
# activate the resume mode --restart is deprecated
rest_parser.add_argument("--emails-only", \
action='store_true', dest='only_emails', \
default=False, help= 'Only sync emails.')
-
+
# activate the resume mode --restart is deprecated
rest_parser.add_argument("--chats-only", \
action='store_true', dest='only_chats', \
default=False, help= 'Only sync chats.')
-
+
rest_parser.add_argument("-d", "--db-dir", \
action='store', help="Database root directory. (default: $HOME/gmvault-db)",\
dest="db_dir", default= self.DEFAULT_GMVAULT_DB)
-
+
# for both when seen add const empty otherwise not_seen
# this allow to distinguish between an empty value and a non seen option
rest_parser.add_argument("-y", "--oauth2", \
@@ -317,19 +319,19 @@ def _create_parser(self): #pylint: disable=R0915
rest_parser.add_argument("--server", metavar = "HOSTNAME", \
action='store', help="Gmail imap server hostname. (default: imap.gmail.com)",\
dest="host", default="imap.gmail.com")
-
+
rest_parser.add_argument("--port", metavar = "PORT", \
action='store', help="Gmail imap server port. (default: 993)",\
dest="port", default=993)
-
+
rest_parser.add_argument("--debug", "-debug", \
action='store_true', help="Activate debugging info",\
dest="debug", default=False)
-
+
rest_parser.set_defaults(verb='restore')
-
+
rest_parser.epilogue = REST_HELP_EPILOGUE
-
+
# check_db command
check_parser = subparsers.add_parser('check', \
help='check and clean the gmvault-db disk database.')
@@ -337,11 +339,11 @@ def _create_parser(self): #pylint: disable=R0915
#email argument
check_parser.add_argument('email', \
action='store', default='empty_$_email', help='gmail account against which to check.')
-
+
check_parser.add_argument("-d", "--db-dir", \
action='store', help="Database root directory. (default: $HOME/gmvault-db)",\
dest="db_dir", default= self.DEFAULT_GMVAULT_DB)
-
+
# for both when seen add const empty otherwise not_seen
# this allow to distinguish between an empty value and a non seen option
check_parser.add_argument("-y", "--oauth2", \
@@ -359,17 +361,17 @@ def _create_parser(self): #pylint: disable=R0915
check_parser.add_argument("--server", metavar = "HOSTNAME", \
action='store', help="Gmail imap server hostname. (default: imap.gmail.com)",\
dest="host", default="imap.gmail.com")
-
+
check_parser.add_argument("--port", metavar = "PORT", \
action='store', help="Gmail imap server port. (default: 993)",\
dest="port", default=993)
-
+
check_parser.add_argument("--debug", "-debug", \
action='store_true', help="Activate debugging info",\
dest="debug", default=False)
-
+
check_parser.set_defaults(verb='check')
-
+
# export command
export_parser = subparsers.add_parser('export', \
help='Export the gmvault-db database to another format.')
@@ -394,11 +396,11 @@ def _create_parser(self): #pylint: disable=R0915
dest="debug", default=False)
export_parser.set_defaults(verb='export')
-
+
export_parser.epilogue = EXPORT_HELP_EPILOGUE
return parser
-
+
@classmethod
def _parse_common_args(cls, options, parser, parsed_args, list_of_types = []): #pylint:disable=W0102
"""
@@ -406,21 +408,21 @@ def _parse_common_args(cls, options, parser, parsed_args, list_of_types = []): #
"""
#add email
parsed_args['email'] = options.email
-
+
parsed_args['debug'] = options.debug
-
+
parsed_args['restart'] = options.restart
-
+
#user entered both authentication methods
if options.passwd == 'empty' and (options.oauth2_token == 'empty'):
parser.error('You have to use one authentication method. '\
'Please choose between OAuth2 and password (recommend OAuth2).')
-
+
# user entered no authentication methods => go to default oauth
if options.passwd == 'not_seen' and options.oauth2_token == 'not_seen':
#default to xoauth
options.oauth2_token = 'empty'
-
+
# add passwd
parsed_args['passwd'] = options.passwd
@@ -439,54 +441,54 @@ def _parse_common_args(cls, options, parser, parsed_args, list_of_types = []): #
else:
parser.error('Unknown type for command %s. The type should be one of %s' \
% (parsed_args['command'], list_of_types))
-
+
#add db_dir
parsed_args['db-dir'] = options.db_dir
- LOG.critical("Use gmvault-db located in %s.\n" % (parsed_args['db-dir']))
-
+ LOG.critical("Use gmvault-db located in %s.\n" % (parsed_args['db-dir']))
+
# add host
parsed_args['host'] = options.host
-
+
#convert to int if necessary
port_type = type(options.port)
-
+
try:
if port_type == type('s') or port_type == type("s"):
port = int(options.port)
else:
port = options.port
- except Exception, _: #pylint:disable=W0703
+ except Exception as _: #pylint:disable=W0703
parser.error("--port option %s is not a number. Please check the port value" % (port))
-
+
# add port
parsed_args['port'] = port
-
+
return parsed_args
-
+
def parse_args(self): #pylint: disable=R0912
- """ Parse command line arguments
-
+ """ Parse command line arguments
+
:returns: a dict that contains the arguments
-
+
:except Exception Error
-
+
"""
parser = self._create_parser()
-
+
options = parser.parse_args()
-
+
LOG.debug("Namespace = %s\n" % (options))
-
+
parsed_args = { }
-
+
parsed_args['command'] = options.verb
-
+
if parsed_args.get('command', '') == 'sync':
-
+
# parse common arguments for sync and restore
self._parse_common_args(options, parser, parsed_args, self.SYNC_TYPES)
-
+
# handle the search requests (IMAP or GMAIL dialect)
if options.imap_request and options.gmail_request:
parser.error('Please use only one search request type. You can use --imap-req or --gmail-req.')
@@ -497,69 +499,69 @@ def parse_args(self): #pylint: disable=R0912
parsed_args['request'] = { 'type': 'gmail', 'req' : self._clean_imap_or_gm_request(options.gmail_request)}
else:
parsed_args['request'] = { 'type':'imap', 'req' : self._clean_imap_or_gm_request(options.imap_request)}
-
+
# handle emails or chats only
if options.only_emails and options.only_chats:
parser.error("--emails-only and --chats-only cannot be used together. Please choose one.")
-
+
parsed_args['emails_only'] = options.only_emails
parsed_args['chats_only'] = options.only_chats
-
+
# add db-cleaning
# if request passed put it False unless it has been forced by the user
# default is True (db-cleaning done)
- #default
+ #default
parsed_args['db-cleaning'] = True
-
+
# if there is a value then it is forced
- if options.db_cleaning:
+ if options.db_cleaning:
parsed_args['db-cleaning'] = parser.convert_to_boolean(options.db_cleaning)
-
+
#elif parsed_args['request']['req'] != 'ALL' and not options.db_cleaning:
# #else if we have a request and not forced put it to false
# parsed_args['db-cleaning'] = False
-
+
if parsed_args['db-cleaning']:
LOG.critical("Activate Gmvault db cleaning.")
else:
LOG.critical("Disable deletion of emails that are in Gmvault db and not anymore in Gmail.")
-
+
#add encryption option
parsed_args['encrypt'] = options.encrypt
#add ownership checking
parsed_args['ownership_control'] = not options.allow_mult_owners
-
+
#compression flag
parsed_args['compression'] = options.compression
-
-
+
+
elif parsed_args.get('command', '') == 'restore':
-
+
# parse common arguments for sync and restore
self._parse_common_args(options, parser, parsed_args, self.RESTORE_TYPES)
-
+
# apply restore labels if there is any
parsed_args['apply_label'] = options.apply_label
-
+
parsed_args['restart'] = options.restart
-
+
# handle emails or chats only
if options.only_emails and options.only_chats:
parser.error("--emails-only and --chats-only cannot be used together. Please choose one.")
-
+
parsed_args['emails_only'] = options.only_emails
parsed_args['chats_only'] = options.only_chats
-
+
elif parsed_args.get('command', '') == 'check':
-
+
#add defaults for type
options.type = 'full'
options.restart = False
-
+
# parse common arguments for sync and restore
self._parse_common_args(options, parser, parsed_args, self.CHECK_TYPES)
-
+
elif parsed_args.get('command', '') == 'export':
parsed_args['labels'] = options.label
parsed_args['db-dir'] = options.db_dir
@@ -572,12 +574,12 @@ def parse_args(self): #pylint: disable=R0912
elif parsed_args.get('command', '') == 'config':
pass
-
+
#add parser
parsed_args['parser'] = parser
-
+
return parsed_args
-
+
@classmethod
def _clean_imap_or_gm_request(cls, request):
"""
@@ -586,13 +588,13 @@ def _clean_imap_or_gm_request(cls, request):
If the request starts and ends with single quote eat them.
"""
LOG.debug("clean_imap_or_gm_request. original request = %s\n" % (request))
-
+
if request and (len(request) > 2) and (request[0] == "'" and request[-1] == "'"):
request = request[1:-1]
-
+
LOG.debug("clean_imap_or_gm_request. processed request = %s\n" % (request))
return request
-
+
@classmethod
def _export(cls, args):
"""
@@ -615,88 +617,88 @@ def _restore(cls, args, credential):
# Create a gmvault releaving read_only_access
restorer = gmvault.GMVaulter(args['db-dir'], args['host'], args['port'], \
args['email'], credential, read_only_access = False)
-
+
#full sync is the first one
if args.get('type', '') == 'full':
-
+
#call restore
labels = [args['apply_label']] if args['apply_label'] else []
restorer.restore(extra_labels = labels, restart = args['restart'], \
emails_only = args['emails_only'], chats_only = args['chats_only'])
-
+
elif args.get('type', '') == 'quick':
-
+
#take the last two to 3 months depending on the current date
-
+
# today - 2 months
today = datetime.date.today()
begin = today - datetime.timedelta(gmvault_utils.get_conf_defaults().getint("Restore", "quick_days", 8))
-
+
starting_dir = gmvault_utils.get_ym_from_datetime(begin)
-
+
#call restore
labels = [args['apply_label']] if args['apply_label'] else []
restorer.restore(pivot_dir = starting_dir, extra_labels = labels, restart = args['restart'], \
emails_only = args['emails_only'], chats_only = args['chats_only'])
-
+
else:
raise ValueError("Unknown synchronisation mode %s. Please use full (default), quick.")
-
+
#print error report
- LOG.critical(restorer.get_operation_report())
-
- @classmethod
+ LOG.critical(restorer.get_operation_report())
+
+ @classmethod
def _sync(cls, args, credential):
"""
Execute All synchronisation operations
"""
LOG.critical("Connect to Gmail server.\n")
-
+
# handle credential in all levels
syncer = gmvault.GMVaulter(args['db-dir'], args['host'], args['port'], \
args['email'], credential, read_only_access = True, \
use_encryption = args['encrypt'])
#full sync is the first one
if args.get('type', '') == 'full':
-
+
#choose full sync. Ignore the request
syncer.sync({ 'mode': 'full', 'type': 'imap', 'req': 'ALL' } , compress_on_disk = args['compression'], \
db_cleaning = args['db-cleaning'], ownership_checking = args['ownership_control'],\
restart = args['restart'], emails_only = args['emails_only'], chats_only = args['chats_only'])
-
+
elif args.get('type', '') == 'auto':
-
+
#choose auto sync. imap request = ALL and restart = True
syncer.sync({ 'mode': 'auto', 'type': 'imap', 'req': 'ALL' } , compress_on_disk = args['compression'], \
db_cleaning = args['db-cleaning'], ownership_checking = args['ownership_control'],\
restart = True, emails_only = args['emails_only'], chats_only = args['chats_only'])
-
+
elif args.get('type', '') == 'quick':
-
- #sync only the last x days (taken in defaults) in order to be quick
+
+ #sync only the last x days (taken in defaults) in order to be quick
#(cleaning is import here because recent days might move again
-
+
# today - 2 months
today = datetime.date.today()
begin = today - datetime.timedelta(gmvault_utils.get_conf_defaults().getint("Sync", "quick_days", 8))
-
+
LOG.critical("Quick sync mode. Check for new emails since %s." % (begin.strftime('%d-%b-%Y')))
-
+
# today + 1 day
end = today + datetime.timedelta(1)
-
+
req = { 'type' : 'imap', \
'req' : syncer.get_imap_request_btw_2_dates(begin, end), \
'mode' : 'quick'}
-
+
syncer.sync( req, \
compress_on_disk = args['compression'], \
db_cleaning = args['db-cleaning'], \
ownership_checking = args['ownership_control'], restart = args['restart'], \
emails_only = args['emails_only'], chats_only = args['chats_only'])
-
+
elif args.get('type', '') == 'custom':
-
+
#convert args to unicode
u_str = gmvault_utils.convert_argv_to_unicode(args['request']['req'])
args['request']['req'] = u_str
@@ -706,64 +708,64 @@ def _sync(cls, args, credential):
# pass an imap request. Assume that the user know what to do here
LOG.critical("Perform custom synchronisation with %s request: %s.\n" \
% (args['request']['type'], args['request']['req']))
-
+
syncer.sync(args['request'], compress_on_disk = args['compression'], db_cleaning = args['db-cleaning'], \
ownership_checking = args['ownership_control'], restart = args['restart'], \
emails_only = args['emails_only'], chats_only = args['chats_only'])
else:
raise ValueError("Unknown synchronisation mode %s. Please use full (default), quick or custom.")
-
-
+
+
#print error report
LOG.critical(syncer.get_operation_report())
-
+
@classmethod
def _check_db(cls, args, credential):
"""
Check DB
"""
LOG.critical("Connect to Gmail server.\n")
-
+
# handle credential in all levels
checker = gmvault.GMVaulter(args['db-dir'], args['host'], args['port'], \
args['email'], credential, read_only_access = True)
-
+
checker.check_clean_db(db_cleaning = True)
-
+
def run(self, args): #pylint:disable=R0912
"""
- Run the grep with the given args
+ Run the grep with the given args
"""
on_error = True
die_with_usage = True
-
+
try:
if args.get('command') not in ('export'):
credential = CredentialHelper.get_credential(args)
-
+
if args.get('command', '') == 'sync':
self._sync(args, credential)
-
+
elif args.get('command', '') == 'restore':
-
+
self._restore(args, credential)
-
+
elif args.get('command', '') == 'check':
-
+
self._check_db(args, credential)
-
+
elif args.get('command', '') == 'export':
self._export(args)
elif args.get('command', '') == 'config':
-
+
LOG.critical("Configure something. TBD.\n")
-
+
on_error = False
-
- except KeyboardInterrupt, _:
+
+ except KeyboardInterrupt as _:
LOG.critical("\nCTRL-C. Stop all operations.\n")
on_error = False
except socket.error:
@@ -773,7 +775,7 @@ def run(self, args): #pylint:disable=R0912
LOG.critical(gmvault_utils.get_exception_traceback())
LOG.critical("=== End of Exception traceback ===\n")
die_with_usage = False
- except imaplib.IMAP4.error, imap_err:
+ except imaplib.IMAP4.error as imap_err:
#bad login or password
if str(imap_err) in ['[AUTHENTICATIONFAILED] Invalid credentials (Failure)', \
'[ALERT] Web login required: http://support.google.com/'\
@@ -787,25 +789,25 @@ def run(self, args): #pylint:disable=R0912
LOG.critical("=== Exception traceback ===")
LOG.critical(gmvault_utils.get_exception_traceback())
LOG.critical("=== End of Exception traceback ===\n")
- except Exception, err:
+ except Exception as err:
LOG.critical("Error: %s. \n" % (err) )
LOG.critical("=== Exception traceback ===")
LOG.critical(gmvault_utils.get_exception_traceback())
LOG.critical("=== End of Exception traceback ===\n")
die_with_usage = False
- finally:
+ finally:
if on_error:
if die_with_usage:
args['parser'].die_with_usage()
sys.exit(1)
-
+
def init_logging():
"""
init logging infrastructure
- """
+ """
#setup application logs: one handler for stdout and one for a log file
- log_utils.LoggerFactory.setup_cli_app_handler(log_utils.STANDALONE, activate_log_file=False, file_path="./gmvault.log")
-
+ log_utils.LoggerFactory.setup_cli_app_handler(log_utils.STANDALONE, activate_log_file=False, file_path="./gmvault.log")
+
def activate_debug_mode():
"""
Activate debugging logging
@@ -821,8 +823,8 @@ def sigusr1_handler(signum, frame): #pylint:disable=W0613
filename = './gmvault.traceback.txt'
- print("GMVAULT: Received SIGUSR1 -- Printing stack trace in %s..." %
- os.path.abspath(filename))
+ print(("GMVAULT: Received SIGUSR1 -- Printing stack trace in %s..." %
+ os.path.abspath(filename)))
with open(filename, 'a') as f:
traceback.print_stack(file=f)
@@ -839,39 +841,39 @@ def setup_default_conf():
def bootstrap_run():
""" temporary bootstrap """
-
+
init_logging()
#force argv[0] to gmvault
sys.argv[0] = "gmvault"
-
+
LOG.critical("")
-
+
gmvlt = GMVaultLauncher()
-
+
args = gmvlt.parse_args()
#activate debug if enabled
if args['debug']:
LOG.critical("Activate debugging information.")
activate_debug_mode()
-
+
# force instanciation of conf to load the defaults
- gmvault_utils.get_conf_defaults()
-
+ gmvault_utils.get_conf_defaults()
+
gmvlt.run(args)
-
-
+
+
if __name__ == '__main__':
-
+
#import memdebug
-
+
#memdebug.start(8080)
#import sys
#print("sys.argv=[%s]" %(sys.argv))
-
+
register_traceback_signal()
-
+
bootstrap_run()
-
+
#sys.exit(0)
diff --git a/src/gmv/gmvault.py b/src/gmv/gmvault.py
index c127698e..69c731cb 100755
--- a/src/gmv/gmvault.py
+++ b/src/gmv/gmvault.py
@@ -16,6 +16,7 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import
import json
import time
import datetime
@@ -23,17 +24,17 @@
import itertools
import imaplib
-import gmv.log_utils as log_utils
-import gmv.collections_utils as collections_utils
-import gmv.gmvault_utils as gmvault_utils
-import gmv.imap_utils as imap_utils
-import gmv.gmvault_db as gmvault_db
+from . import log_utils
+from . import collections_utils
+from . import gmvault_utils
+from . import imap_utils
+from . import gmvault_db
LOG = log_utils.LoggerFactory.get_logger('gmvault')
def handle_restore_imap_error(the_exception, gm_id, db_gmail_ids_info, gmvaulter):
"""
- function to handle restore IMAPError and OSError([Errno 2] No such file or directory) in restore functions
+ function to handle restore IMAPError and OSError([Errno 2] No such file or directory) in restore functions
"""
if isinstance(the_exception, imaplib.IMAP4.abort):
# if this is a Gmvault SSL Socket error quarantine the email and continue the restore
@@ -48,36 +49,36 @@ def handle_restore_imap_error(the_exception, gm_id, db_gmail_ids_info, gmvaulter
raise the_exception
elif isinstance(the_exception, IOError) and str(the_exception).find('[Errno 2] No such file or directory:') >=0:
LOG.critical("Quarantine email with gm id %s from %s. GMAIL IMAP cannot restore it:"\
- " err={%s}" % (gm_id, db_gmail_ids_info[gm_id], str(the_exception)))
+ " err={%s}" % (gm_id, db_gmail_ids_info[gm_id], str(the_exception)))
gmvaulter.gstorer.quarantine_email(gm_id)
gmvaulter.error_report['emails_in_quarantine'].append(gm_id)
LOG.critical("Disconnecting and reconnecting to restart cleanly.")
- gmvaulter.src.reconnect() #reconnect
-
- elif isinstance(the_exception, imaplib.IMAP4.error):
+ gmvaulter.src.reconnect() #reconnect
+
+ elif isinstance(the_exception, imaplib.IMAP4.error):
LOG.error("Catched IMAP Error %s" % (str(the_exception)))
LOG.exception(the_exception)
-
+
#When the email cannot be read from Database because it was empty when returned by gmail imap
#quarantine it.
if str(the_exception) == "APPEND command error: BAD ['Invalid Arguments: Unable to parse message']":
LOG.critical("Quarantine email with gm id %s from %s. GMAIL IMAP cannot restore it:"\
" err={%s}" % (gm_id, db_gmail_ids_info[gm_id], str(the_exception)))
gmvaulter.gstorer.quarantine_email(gm_id)
- gmvaulter.error_report['emails_in_quarantine'].append(gm_id)
+ gmvaulter.error_report['emails_in_quarantine'].append(gm_id)
else:
raise the_exception
elif isinstance(the_exception, imap_utils.PushEmailError):
LOG.error("Catch the following exception %s" % (str(the_exception)))
LOG.exception(the_exception)
-
+
if the_exception.quarantined():
LOG.critical("Quarantine email with gm id %s from %s. GMAIL IMAP cannot restore it:"\
" err={%s}" % (gm_id, db_gmail_ids_info[gm_id], str(the_exception)))
gmvaulter.gstorer.quarantine_email(gm_id)
- gmvaulter.error_report['emails_in_quarantine'].append(gm_id)
+ gmvaulter.error_report['emails_in_quarantine'].append(gm_id)
else:
- raise the_exception
+ raise the_exception
else:
LOG.error("Catch the following exception %s" % (str(the_exception)))
LOG.exception(the_exception)
@@ -87,10 +88,10 @@ def handle_sync_imap_error(the_exception, the_id, error_report, src):
"""
function to handle IMAPError in gmvault
type = chat or email
- """
+ """
if isinstance(the_exception, imaplib.IMAP4.abort):
- # imap abort error
- # ignore it
+ # imap abort error
+ # ignore it
# will have to do something with these ignored messages
LOG.critical("Error while fetching message with imap id %s." % (the_id))
LOG.critical("\n=== Exception traceback ===\n")
@@ -98,8 +99,8 @@ def handle_sync_imap_error(the_exception, the_id, error_report, src):
LOG.critical("=== End of Exception traceback ===\n")
try:
#try to get the gmail_id
- curr = src.fetch(the_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
- except Exception, _: #pylint:disable-msg=W0703
+ curr = src.fetch(the_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
+ except Exception as _: #pylint:disable-msg=W0703
curr = None
LOG.critical("Error when trying to get gmail id for message with imap id %s." % (the_id))
LOG.critical("Disconnect, wait for 10 sec then reconnect.")
@@ -109,53 +110,53 @@ def handle_sync_imap_error(the_exception, the_id, error_report, src):
time.sleep(10)
LOG.critical("Reconnecting ...")
src.connect()
-
+
if curr:
gmail_id = curr[the_id].get(imap_utils.GIMAPFetcher.GMAIL_ID)
else:
gmail_id = None
-
+
#add ignored id
error_report['cannot_be_fetched'].append((the_id, gmail_id))
-
+
LOG.critical("Forced to ignore message with imap id %s, (gmail id %s)." \
% (the_id, (gmail_id if gmail_id else "cannot be read")))
-
+
elif isinstance(the_exception, imaplib.IMAP4.error):
- # check if this is a cannot be fetched error
+ # check if this is a cannot be fetched error
# I do not like to do string guessing within an exception but I do not have any choice here
LOG.critical("Error while fetching message with imap id %s." % (the_id))
LOG.critical("\n=== Exception traceback ===\n")
LOG.critical(gmvault_utils.get_exception_traceback())
LOG.critical("=== End of Exception traceback ===\n")
-
+
#quarantine emails that have raised an abort error
if str(the_exception).find("'Some messages could not be FETCHed (Failure)'") >= 0:
try:
#try to get the gmail_id
LOG.critical("One more attempt. Trying to fetch the Gmail ID for %s" % (the_id) )
- curr = src.fetch(the_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
- except Exception, _: #pylint:disable-msg=W0703
+ curr = src.fetch(the_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
+ except Exception as _: #pylint:disable-msg=W0703
curr = None
-
+
if curr:
gmail_id = curr[the_id].get(imap_utils.GIMAPFetcher.GMAIL_ID)
else:
gmail_id = None
-
+
#add ignored id
error_report['cannot_be_fetched'].append((the_id, gmail_id))
-
+
LOG.critical("Ignore message with imap id %s, (gmail id %s)" % (the_id, (gmail_id if gmail_id else "cannot be read")))
-
+
else:
raise the_exception #rethrow error
else:
- raise the_exception
+ raise the_exception
class IMAPBatchFetcher(object):
"""
- Fetch IMAP data in batch
+ Fetch IMAP data in batch
"""
def __init__(self, src, imap_ids, error_report, request, default_batch_size = 100):
"""
@@ -165,124 +166,124 @@ def __init__(self, src, imap_ids, error_report, request, default_batch_size = 10
self.imap_ids = imap_ids
self.def_batch_size = default_batch_size
self.request = request
- self.error_report = error_report
-
+ self.error_report = error_report
+
self.to_fetch = list(imap_ids)
-
+
def individual_fetch(self, imap_ids):
"""
Find the imap_id creating the issue
return the data related to the imap_ids
"""
new_data = {}
- for the_id in imap_ids:
- try:
+ for the_id in imap_ids:
+ try:
single_data = self.src.fetch(the_id, self.request)
- new_data.update(single_data)
- except Exception, error:
+ new_data.update(single_data)
+ except Exception as error:
handle_sync_imap_error(error, the_id, self.error_report, self.src) #do everything in this handler
return new_data
-
+
def __iter__(self):
- return self
-
+ return self
+
def next(self):
"""
Return the next batch of elements
"""
new_data = {}
batch = self.to_fetch[:self.def_batch_size]
-
+
if len(batch) <= 0:
raise StopIteration
-
+
try:
-
+
new_data = self.src.fetch(batch, self.request)
-
+
self.to_fetch = self.to_fetch[self.def_batch_size:]
-
+
return new_data
- except imaplib.IMAP4.error, _:
- new_data = self.individual_fetch(batch)
-
+ except imaplib.IMAP4.error as _:
+ new_data = self.individual_fetch(batch)
+
return new_data
-
+
def reset(self):
"""
Restart from the beginning
"""
- self.to_fetch = self.imap_ids
-
+ self.to_fetch = self.imap_ids
+
class GMVaulter(object):
"""
Main object operating over gmail
- """
+ """
NB_GRP_OF_ITEMS = 1400
EMAIL_RESTORE_PROGRESS = 'email_last_id.restore'
CHAT_RESTORE_PROGRESS = 'chat_last_id.restore'
EMAIL_SYNC_PROGRESS = 'email_last_id.sync'
CHAT_SYNC_PROGRESS = 'chat_last_id.sync'
-
+
OP_EMAIL_RESTORE = "EM_RESTORE"
OP_EMAIL_SYNC = "EM_SYNC"
OP_CHAT_RESTORE = "CH_RESTORE"
OP_CHAT_SYNC = "CH_SYNC"
-
+
OP_TO_FILENAME = { OP_EMAIL_RESTORE : EMAIL_RESTORE_PROGRESS,
OP_EMAIL_SYNC : EMAIL_SYNC_PROGRESS,
OP_CHAT_RESTORE : CHAT_RESTORE_PROGRESS,
OP_CHAT_SYNC : CHAT_SYNC_PROGRESS
}
-
-
+
+
def __init__(self, db_root_dir, host, port, login, \
credential, read_only_access = True, use_encryption = False): #pylint:disable-msg=R0913,R0914
"""
constructor
- """
+ """
self.db_root_dir = db_root_dir
-
+
#create dir if it doesn't exist
gmvault_utils.makedirs(self.db_root_dir)
-
+
#keep track of login email
self.login = login
-
+
# create source and try to connect
self.src = imap_utils.GIMAPFetcher(host, port, login, credential, \
readonly_folder = read_only_access)
-
+
self.src.connect()
-
+
LOG.debug("Connected")
-
+
self.use_encryption = use_encryption
-
+
#to report gmail imap problems
self.error_report = { 'empty' : [] ,
'cannot_be_fetched' : [],
'emails_in_quarantine' : [],
'reconnections' : 0,
'key_error' : []}
-
+
#instantiate gstorer
self.gstorer = gmvault_db.GmailStorer(self.db_root_dir, self.use_encryption)
-
+
#timer used to mesure time spent in the different values
self.timer = gmvault_utils.Timer()
-
+
@classmethod
def get_imap_request_btw_2_dates(cls, begin_date, end_date):
"""
Return the imap request for those 2 dates
"""
imap_req = 'Since %s Before %s' % (gmvault_utils.datetime2imapdate(begin_date), gmvault_utils.datetime2imapdate(end_date))
-
+
return imap_req
-
+
def get_operation_report(self):
"""
Return the error report
@@ -302,11 +303,11 @@ def get_operation_report(self):
len(self.error_report['empty']), \
len(self.error_report['key_error'])
)
-
+
LOG.debug("error_report complete structure = %s" % (self.error_report))
-
+
return the_str
-
+
@classmethod
def _get_next_date(cls, a_current_date, start_month_beginning = False):
"""
@@ -316,10 +317,10 @@ def _get_next_date(cls, a_current_date, start_month_beginning = False):
dummy_date = a_current_date.replace(day=1)
else:
dummy_date = a_current_date
-
+
# the next date = current date + 1 month
return dummy_date + datetime.timedelta(days=31)
-
+
@classmethod
def check_email_on_disk(cls, a_gstorer, a_id, a_dir = None):
"""
@@ -327,15 +328,15 @@ def check_email_on_disk(cls, a_gstorer, a_id, a_dir = None):
"""
try:
a_dir = a_gstorer.get_directory_from_id(a_id, a_dir)
-
+
if a_dir:
- return a_gstorer.unbury_metadata(a_id, a_dir)
-
- except ValueError, json_error:
+ return a_gstorer.unbury_metadata(a_id, a_dir)
+
+ except ValueError as json_error:
LOG.exception("Cannot read file %s. Try to fetch the data again" % ('%s.meta' % (a_id)), json_error )
-
+
return None
-
+
@classmethod
def _metadata_needs_update(cls, curr_metadata, new_metadata, chat_metadata = False):
"""
@@ -343,44 +344,44 @@ def _metadata_needs_update(cls, curr_metadata, new_metadata, chat_metadata = Fal
"""
if curr_metadata[gmvault_db.GmailStorer.ID_K] != new_metadata['X-GM-MSGID']:
raise Exception("Gmail id has changed for %s" % (curr_metadata['id']))
-
- #check flags
- prev_set = set(new_metadata['FLAGS'])
-
+
+ #check flags
+ prev_set = set(new_metadata['FLAGS'])
+
for flag in curr_metadata['flags']:
if flag not in prev_set:
return True
else:
prev_set.remove(flag)
-
+
if len(prev_set) > 0:
return True
-
+
#check labels
prev_labels = set(new_metadata['X-GM-LABELS'])
-
+
if chat_metadata: #add gmvault-chats labels
prev_labels.add(gmvault_db.GmailStorer.CHAT_GM_LABEL)
-
-
+
+
for label in curr_metadata['labels']:
if label not in prev_labels:
return True
else:
prev_labels.remove(label)
-
+
if len(prev_labels) > 0:
return True
-
+
return False
-
-
+
+
def _check_email_db_ownership(self, ownership_control):
"""
Check email database ownership.
If ownership control activated then fail if a new additional owner is added.
Else if no ownership control allow one more user and save it in the list of owners
-
+
Return the number of owner this will be used to activate or not the db clean.
Activating a db cleaning on a multiownership db would be a catastrophy as it would delete all
the emails from the others users.
@@ -394,34 +395,34 @@ def _check_email_db_ownership(self, ownership_control):
% (self.db_root_dir, ", ".join(db_owners), self.login))
else:
if len(db_owners) == 0:
- LOG.critical("Establish %s as the owner of the Gmvault db %s." % (self.login, self.db_root_dir))
+ LOG.critical("Establish %s as the owner of the Gmvault db %s." % (self.login, self.db_root_dir))
elif len(db_owners) > 0 and self.login not in db_owners:
LOG.critical("The email database %s is hosting emails from %s. It will now also store emails from %s" \
% (self.db_root_dir, ", ".join(db_owners), self.login))
-
+
#try to save db_owner in the list of owners
self.gstorer.store_db_owner(self.login)
-
+
def _sync_chats(self, imap_req, compress, restart):
"""
sync emails
"""
chat_dir = None
-
+
timer = gmvault_utils.Timer() #start local timer for chat
timer.start()
-
+
LOG.debug("Before selection")
if self.src.is_visible('CHATS'):
chat_dir = self.src.select_folder('CHATS')
-
+
LOG.debug("Selection is finished")
if chat_dir:
imap_ids = self._common_sync(timer, "chat", imap_req, compress, restart)
else:
- imap_ids = []
-
+ imap_ids = []
+
LOG.critical("\nchats synchronisation operation performed in %s.\n" % (timer.seconds_to_human_time(timer.elapsed())))
return imap_ids
@@ -429,29 +430,29 @@ def _sync_chats(self, imap_req, compress, restart):
def _common_sync(self, a_timer, a_type, imap_req, compress, restart):
"""
- common syncing method for both emails and chats.
+ common syncing method for both emails and chats.
"""
# get all imap ids in All Mail
imap_ids = self.src.search(imap_req)
last_id_file = self.OP_EMAIL_SYNC if a_type == "email" else self.OP_CHAT_SYNC
-
+
# check if there is a restart
if restart:
LOG.critical("Restart mode activated for emails. Need to find information in Gmail, be patient ...")
imap_ids = self.get_gmails_ids_left_to_sync(last_id_file, imap_ids, imap_req)
-
+
total_nb_msgs_to_process = len(imap_ids) # total number of emails to get
-
+
LOG.critical("%d %ss to be fetched." % (total_nb_msgs_to_process, a_type))
-
+
nb_msgs_processed = 0
-
+
to_fetch = set(imap_ids)
batch_fetcher = IMAPBatchFetcher(self.src, imap_ids, self.error_report, imap_utils.GIMAPFetcher.GET_ALL_BUT_DATA, \
default_batch_size = \
gmvault_utils.get_conf_defaults().getint("General","nb_messages_per_batch",500))
-
+
#choose different bury methods if it is an email or a chat
if a_type == "email":
bury_metadata_fn = self.gstorer.bury_metadata
@@ -463,13 +464,13 @@ def _common_sync(self, a_timer, a_type, imap_req, compress, restart):
chat_metadata = True
else:
raise Exception("Error a_type %s in _common_sync is unknown" % (a_type))
-
+
#LAST Thing to do remove all found ids from imap_ids and if ids left add missing in report
- for new_data in batch_fetcher:
+ for new_data in batch_fetcher:
for the_id in new_data:
if new_data.get(the_id, None):
LOG.debug("\nProcess imap id %s" % ( the_id ))
-
+
gid = new_data[the_id].get(imap_utils.GIMAPFetcher.GMAIL_ID, None)
eml_date = new_data[the_id].get(imap_utils.GIMAPFetcher.IMAP_INTERNALDATE, None)
@@ -477,21 +478,21 @@ def _common_sync(self, a_timer, a_type, imap_req, compress, restart):
LOG.info("Ignore email with id %s. No %s nor %s found in %s." % (the_id, imap_utils.GIMAPFetcher.GMAIL_ID, imap_utils.GIMAPFetcher.IMAP_INTERNALDATE, new_data[the_id]))
self.error_report['empty'].append((the_id, gid if gid else None))
pass #ignore this email and process the next one
-
+
if a_type == "email":
the_dir = gmvault_utils.get_ym_from_datetime(eml_date)
elif a_type == "chat":
the_dir = self.gstorer.get_sub_chats_dir()
else:
raise Exception("Error a_type %s in _common_sync is unknown" % (a_type))
-
+
LOG.critical("Process %s num %d (imap_id:%s) from %s." % (a_type, nb_msgs_processed, the_id, the_dir))
-
+
#decode the labels that are received as utf7 => unicode
try:
new_data[the_id][imap_utils.GIMAPFetcher.GMAIL_LABELS] = \
imap_utils.decode_labels(new_data[the_id][imap_utils.GIMAPFetcher.GMAIL_LABELS])
- except KeyError, ke:
+ except KeyError as ke:
LOG.info("KeyError, reason: %s. new_data[%s]=%s" % (str(ke), the_id, new_data.get(the_id)))
# try to fetch it individually and replace current info if it fails then raise error.
id_info = None
@@ -499,7 +500,7 @@ def _common_sync(self, a_timer, a_type, imap_req, compress, restart):
id_info = batch_fetcher.individual_fetch(the_id)
new_data[the_id][imap_utils.GIMAPFetcher.GMAIL_LABELS] = \
imap_utils.decode_labels(id_info[imap_utils.GIMAPFetcher.GMAIL_LABELS])
- except Exception, err:
+ except Exception as err:
LOG.debug("Error when trying to fetch again information for email id %s. id_info = %s. exception:(%s)" \
% (the_id, id_info, str(err)))
LOG.info("Missing labels information for email id %s. Ignore it\n" % (the_id))
@@ -507,57 +508,57 @@ def _common_sync(self, a_timer, a_type, imap_req, compress, restart):
continue
LOG.debug("metadata info collected: %s\n" % (new_data[the_id]))
-
+
#pass the dir and the ID
curr_metadata = GMVaulter.check_email_on_disk( self.gstorer , \
new_data[the_id][imap_utils.GIMAPFetcher.GMAIL_ID], \
the_dir)
-
+
#if on disk check that the data is not different
if curr_metadata:
-
+
LOG.debug("metadata for %s already exists. Check if different." % (gid))
-
+
if self._metadata_needs_update(curr_metadata, new_data[the_id], chat_metadata):
-
+
LOG.debug("%s with imap id %s and gmail id %s has changed. Updated it." % (a_type, the_id, gid))
-
+
#restore everything at the moment
gid = bury_metadata_fn(new_data[the_id], local_dir = the_dir)
-
+
#update local index id gid => index per directory to be thought out
else:
LOG.debug("On disk metadata for %s is up to date." % (gid))
- else:
+ else:
try:
#get the data
LOG.debug("Get Data for %s." % (gid))
email_data = self.src.fetch(the_id, imap_utils.GIMAPFetcher.GET_DATA_ONLY )
-
+
new_data[the_id][imap_utils.GIMAPFetcher.EMAIL_BODY] = \
email_data[the_id][imap_utils.GIMAPFetcher.EMAIL_BODY]
-
+
LOG.debug("Storing on disk data for %s" % (gid))
- # store data on disk within year month dir
+ # store data on disk within year month dir
gid = bury_data_fn(new_data[the_id], local_dir = the_dir, compress = compress)
-
+
#update local index id gid => index per directory to be thought out
- LOG.debug("Create and store email with imap id %s, gmail id %s." % (the_id, gid))
- except Exception, error:
- handle_sync_imap_error(error, the_id, self.error_report, self.src) #do everything in this handler
-
+ LOG.debug("Create and store email with imap id %s, gmail id %s." % (the_id, gid))
+ except Exception as error:
+ handle_sync_imap_error(error, the_id, self.error_report, self.src) #do everything in this handler
+
nb_msgs_processed += 1
-
+
#indicate every 50 messages the number of messages left to process
left_emails = (total_nb_msgs_to_process - nb_msgs_processed)
-
+
if (nb_msgs_processed % 50) == 0 and (left_emails > 0):
elapsed = a_timer.elapsed() #elapsed time in seconds
LOG.critical("\n== Processed %d emails in %s. %d left to be stored (time estimate %s).==\n" % \
(nb_msgs_processed, \
a_timer.seconds_to_human_time(elapsed), left_emails, \
a_timer.estimate_time_left(nb_msgs_processed, elapsed, left_emails)))
-
+
# save id every 10 restored emails
if (nb_msgs_processed % 10) == 0:
if gid:
@@ -565,15 +566,15 @@ def _common_sync(self, a_timer, a_type, imap_req, compress, restart):
else:
LOG.info("Could not process message with id %s. Ignore it\n" % (the_id))
self.error_report['empty'].append((the_id, gid if gid else None))
-
+
to_fetch -= set(new_data.keys()) #remove all found keys from to_fetch set
-
+
for the_id in to_fetch:
# case when gmail IMAP server returns OK without any data whatsoever
# eg. imap uid 142221L ignore it
LOG.info("Could not process imap with id %s. Ignore it\n" % (the_id))
self.error_report['empty'].append((the_id, None))
-
+
return imap_ids
def _sync_emails(self, imap_req, compress, restart):
@@ -592,61 +593,61 @@ def _sync_emails(self, imap_req, compress, restart):
return imap_ids
-
+
def sync(self, imap_req, compress_on_disk = True, \
db_cleaning = False, ownership_checking = True, \
restart = False, emails_only = False, chats_only = False):
"""
- sync mode
+ sync mode
"""
#check ownership to have one email per db unless user wants different
#save the owner if new
self._check_email_db_ownership(ownership_checking)
-
+
if not compress_on_disk:
LOG.critical("Disable compression when storing emails.")
-
+
if self.use_encryption:
LOG.critical("Encryption activated. All emails will be encrypted before to be stored.")
LOG.critical("Please take care of the encryption key stored in (%s) or all"\
" your stored emails will become unreadable." \
% (gmvault_db.GmailStorer.get_encryption_key_path(self.db_root_dir)))
-
+
self.error_report['operation'] = 'Sync'
-
+
self.timer.start() #start syncing emails
-
+
now = datetime.datetime.now()
LOG.critical("Start synchronization (%s).\n" % (now.strftime('%Y-%m-%dT%Hh%Mm%Ss')))
-
+
if not chats_only:
# backup emails
LOG.critical("Start emails synchronization.")
self._sync_emails(imap_req, compress = compress_on_disk, restart = restart)
else:
LOG.critical("Skip emails synchronization.\n")
-
+
if not emails_only:
# backup chats
LOG.critical("Start chats synchronization.")
self._sync_chats(imap_req, compress = compress_on_disk, restart = restart)
else:
LOG.critical("\nSkip chats synchronization.\n")
-
+
#delete supress emails from DB since last sync
self.check_clean_db(db_cleaning)
-
+
LOG.debug("Sync operation performed in %s.\n" \
% (self.timer.seconds_to_human_time(self.timer.elapsed())))
self.error_report["operation_time"] = self.timer.seconds_to_human_time(self.timer.elapsed())
-
+
#update number of reconnections
self.error_report["reconnections"] = self.src.total_nb_reconns
-
+
return self.error_report
-
+
def _delete_sync(self, imap_ids, db_gmail_ids, db_gmail_ids_info, msg_type):
"""
Delete emails or chats from the database if necessary
@@ -654,90 +655,90 @@ def _delete_sync(self, imap_ids, db_gmail_ids, db_gmail_ids_info, msg_type):
db_gmail_ids_info : info read from metadata
msg_type : email or chat
"""
-
+
# optimize nb of items
nb_items = self.NB_GRP_OF_ITEMS if len(imap_ids) >= self.NB_GRP_OF_ITEMS else len(imap_ids)
-
+
LOG.critical("Call Gmail to check the stored %ss against the Gmail %ss ids and see which ones have been deleted.\n\n"\
- "This might take a few minutes ...\n" % (msg_type, msg_type))
-
+ "This might take a few minutes ...\n" % (msg_type, msg_type))
+
#calculate the list elements to delete
#query nb_items items in one query to minimise number of imap queries
for group_imap_id in itertools.izip_longest(fillvalue=None, *[iter(imap_ids)]*nb_items):
-
+
# if None in list remove it
- if None in group_imap_id:
+ if None in group_imap_id:
group_imap_id = [ im_id for im_id in group_imap_id if im_id != None ]
-
+
data = self.src.fetch(group_imap_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
-
+
# syntax for 2.7 set comprehension { data[key][imap_utils.GIMAPFetcher.GMAIL_ID] for key in data }
# need to create a list for 2.6
db_gmail_ids.difference_update([data[key].get(imap_utils.GIMAPFetcher.GMAIL_ID) for key in data if data[key].get(imap_utils.GIMAPFetcher.GMAIL_ID)])
-
+
if len(db_gmail_ids) == 0:
break
-
+
LOG.critical("Will delete %s %s(s) from gmvault db.\n" % (len(db_gmail_ids), msg_type) )
for gm_id in db_gmail_ids:
LOG.critical("gm_id %s not in the Gmail server. Delete it." % (gm_id))
self.gstorer.delete_emails([(gm_id, db_gmail_ids_info[gm_id])], msg_type)
-
+
def search_on_date(self, a_eml_date):
"""
- get eml_date and format it to search
+ get eml_date and format it to search
"""
imap_date = gmvault_utils.datetime2imapdate(a_eml_date)
-
+
imap_req = "SINCE %s" % (imap_date)
imap_ids = self.src.search({'type':'imap', 'req': imap_req})
-
+
return imap_ids
-
+
def get_gmails_ids_left_to_sync(self, op_type, imap_ids, imap_req):#pylint:disable-msg=W0613
"""
Get the ids that still needs to be sync
Return a list of ids
"""
filename = self.OP_TO_FILENAME.get(op_type, None)
-
+
if not filename:
raise Exception("Bad Operation (%s) in save_last_id. "\
"This should not happen, send the error to the software developers." % (op_type))
-
+
filepath = '%s/%s_%s' % (self.gstorer.get_info_dir(), self.login, filename)
-
+
if not os.path.exists(filepath):
LOG.critical("last_id.sync file %s doesn't exist.\nSync the full list of backed up emails." %(filepath))
return imap_ids
-
+
json_obj = json.load(open(filepath, 'r'))
-
+
last_id = json_obj['last_id']
-
+
last_id_index = -1
-
+
new_gmail_ids = imap_ids
-
+
try:
#get imap_id from stored gmail_id
dummy = self.src.search({'type':'imap', 'req':'X-GM-MSGID %s' % (last_id)})
-
+
imap_id = dummy[0]
-
+
last_id_index = imap_ids.index(imap_id)
-
+
LOG.critical("Restart from gmail id %s (imap id %s)." % (last_id, imap_id))
-
- new_gmail_ids = imap_ids[last_id_index:]
- except Exception, _: #ignore any exception and try to get all ids in case of problems. pylint:disable=W0703
+
+ new_gmail_ids = imap_ids[last_id_index:]
+ except Exception as _: #ignore any exception and try to get all ids in case of problems. pylint:disable=W0703
#element not in keys return current set of keys
LOG.critical("Error: Cannot restore from last restore gmail id. It is not in Gmail."\
" Sync the complete list of gmail ids requested from Gmail.")
-
+
return new_gmail_ids
-
+
def check_clean_db(self, db_cleaning):
"""
Check and clean the database (remove file that are not anymore in Gmail)
@@ -749,66 +750,66 @@ def check_clean_db(self, db_cleaning):
elif len(owners) > 1:
LOG.critical("The Gmvault db hosts emails from the following accounts: %s.\n"\
% (", ".join(owners)))
-
+
LOG.critical("Deactivate database cleaning on a multi-owners Gmvault db.")
-
+
return
else:
LOG.critical("Look for emails/chats that are in the Gmvault db but not in Gmail servers anymore.\n")
-
+
#get gmail_ids from db
LOG.critical("Read all gmail ids from the Gmvault db. It might take a bit of time ...\n")
-
+
timer = gmvault_utils.Timer() # needed for enhancing the user information
timer.start()
-
+
db_gmail_ids_info = self.gstorer.get_all_existing_gmail_ids()
-
+
LOG.critical("Found %s email(s) in the Gmvault db.\n" % (len(db_gmail_ids_info)) )
-
+
#create a set of keys
db_gmail_ids = set(db_gmail_ids_info.keys())
-
+
# get all imap ids in All Mail
self.src.select_folder('ALLMAIL') #go to all mail
imap_ids = self.src.search(imap_utils.GIMAPFetcher.IMAP_ALL) #search all
-
+
LOG.debug("Got %s emails imap_id(s) from the Gmail Server." % (len(imap_ids)))
-
+
#delete supress emails from DB since last sync
self._delete_sync(imap_ids, db_gmail_ids, db_gmail_ids_info, 'email')
-
+
# get all chats ids
if self.src.is_visible('CHATS'):
-
+
db_gmail_ids_info = self.gstorer.get_all_chats_gmail_ids()
-
+
LOG.critical("Found %s chat(s) in the Gmvault db.\n" % (len(db_gmail_ids_info)) )
-
+
self.src.select_folder('CHATS') #go to chats
chat_ids = self.src.search(imap_utils.GIMAPFetcher.IMAP_ALL)
-
+
db_chat_ids = set(db_gmail_ids_info.keys())
-
+
LOG.debug("Got %s chat imap_ids from the Gmail Server." % (len(chat_ids)))
-
+
#delete supress emails from DB since last sync
self._delete_sync(chat_ids, db_chat_ids, db_gmail_ids_info , 'chat')
else:
LOG.critical("Chats IMAP Directory not visible on Gmail. Ignore deletion of chats.")
-
-
+
+
LOG.critical("\nDeletion checkup done in %s." % (timer.elapsed_human_time()))
-
-
+
+
def remote_sync(self):
"""
Sync with a remote source (IMAP mirror or cloud storage area)
"""
- #sync remotely
+ #sync remotely
pass
-
-
+
+
def save_lastid(self, op_type, gm_id, eml_date=None, imap_req=None):#pylint:disable-msg=W0613
"""
Save the passed gmid in last_id.restore
@@ -863,81 +864,81 @@ def get_gmails_ids_left_to_restore(self, op_type, db_gmail_ids_info):
last_id_index = -1
try:
- keys = db_gmail_ids_info.keys()
+ keys = list(db_gmail_ids_info.keys())
last_id_index = keys.index(last_id)
LOG.critical("Restart from gmail id %s." % last_id)
- except ValueError, _:
+ except ValueError as _:
#element not in keys return current set of keys
LOG.error("Cannot restore from last restore gmail id. It is not in the disk database.")
new_gmail_ids_info = collections_utils.OrderedDict()
if last_id_index != -1:
- for key in db_gmail_ids_info.keys()[last_id_index+1:]:
+ for key in list(db_gmail_ids_info.keys())[last_id_index+1:]:
new_gmail_ids_info[key] = db_gmail_ids_info[key]
else:
- new_gmail_ids_info = db_gmail_ids_info
-
- return new_gmail_ids_info
-
+ new_gmail_ids_info = db_gmail_ids_info
+
+ return new_gmail_ids_info
+
def restore(self, pivot_dir = None, extra_labels = [], \
restart = False, emails_only = False, chats_only = False): #pylint:disable=W0102
"""
Restore emails in a gmail account
"""
-
+
self.error_report['operation'] = 'Sync'
self.timer.start() #start restoring
-
+
now = datetime.datetime.now()
LOG.critical("Start restoration (%s).\n" % (now.strftime('%Y-%m-%dT%Hh%Mm%Ss')))
-
+
if not chats_only:
# backup emails
LOG.critical("Start emails restoration.\n")
-
+
if pivot_dir:
LOG.critical("Quick mode activated. Will only restore all emails since %s.\n" % (pivot_dir))
-
+
self.restore_emails(pivot_dir, extra_labels, restart)
else:
LOG.critical("Skip emails restoration.\n")
-
+
if not emails_only:
# backup chats
LOG.critical("Start chats restoration.\n")
self.restore_chats(extra_labels, restart)
else:
LOG.critical("Skip chats restoration.\n")
-
+
LOG.debug("Restore operation performed in %s.\n" \
% (self.timer.seconds_to_human_time(self.timer.elapsed())))
-
+
self.error_report["operation_time"] = self.timer.seconds_to_human_time(self.timer.elapsed())
-
+
#update number of reconnections
self.error_report["reconnections"] = self.src.total_nb_reconns
-
+
return self.error_report
-
+
def restore_chats(self, extra_labels = [], restart = False): #pylint:disable=W0102
"""
restore chats
"""
- LOG.critical("Restore chats in gmail account %s." % (self.login) )
-
+ LOG.critical("Restore chats in gmail account %s." % (self.login) )
+
LOG.critical("Read chats info from %s gmvault-db." % (self.db_root_dir))
-
+
#get gmail_ids from db
db_gmail_ids_info = self.gstorer.get_all_chats_gmail_ids()
-
- LOG.critical("Total number of chats to restore %s." % (len(db_gmail_ids_info.keys())))
-
+
+ LOG.critical("Total number of chats to restore %s." % (len(list(db_gmail_ids_info.keys()))))
+
if restart:
db_gmail_ids_info = self.get_gmails_ids_left_to_restore(self.OP_CHAT_RESTORE, db_gmail_ids_info)
-
+
total_nb_emails_to_restore = len(db_gmail_ids_info)
LOG.critical("Got all chats id left to restore. Still %s chats to do.\n" % (total_nb_emails_to_restore) )
-
+
existing_labels = set() #set of existing labels to not call create_gmail_labels all the time
reserved_labels_map = gmvault_utils.get_conf_defaults().get_dict("Restore", "reserved_labels_map", \
{ u'migrated' : u'gmv-migrated', u'\muted' : u'gmv-muted' })
@@ -946,76 +947,76 @@ def restore_chats(self, extra_labels = [], restart = False): #pylint:disable=W01
#get all mail folder name
all_mail_name = self.src.get_folder_name("ALLMAIL")
-
+
# go to DRAFTS folder because if you are in ALL MAIL when uploading emails it is very slow
folder_def_location = gmvault_utils.get_conf_defaults().get("General", "restore_default_location", "DRAFTS")
self.src.select_folder(folder_def_location)
-
+
timer = gmvault_utils.Timer() # local timer for restore emails
timer.start()
-
- nb_items = gmvault_utils.get_conf_defaults().get_int("General", "nb_messages_per_restore_batch", 100)
-
- for group_imap_ids in itertools.izip_longest(fillvalue=None, *[iter(db_gmail_ids_info)]*nb_items):
+
+ nb_items = gmvault_utils.get_conf_defaults().get_int("General", "nb_messages_per_restore_batch", 100)
+
+        for group_imap_ids in itertools.zip_longest(fillvalue=None, *[iter(db_gmail_ids_info)]*nb_items):
last_id = group_imap_ids[-1] #will be used to save the last id
#remove all None elements from group_imap_ids
group_imap_ids = itertools.ifilter(lambda x: x != None, group_imap_ids)
-
+
labels_to_create = set(extra_labels) #create label set, add xtra labels in set
-
+
LOG.critical("Processing next batch of %s chats.\n" % (nb_items))
-
+
# unbury the metadata for all these emails
- for gm_id in group_imap_ids:
+ for gm_id in group_imap_ids:
try:
email_meta, email_data = self.gstorer.unbury_email(gm_id)
-
+
LOG.critical("Pushing chat content with id %s." % (gm_id))
LOG.debug("Subject = %s." % (email_meta[self.gstorer.SUBJECT_K]))
-
+
# push data in gmail account and get uids
imap_id = self.src.push_data(all_mail_name, email_data, \
email_meta[self.gstorer.FLAGS_K] , \
- email_meta[self.gstorer.INT_DATE_K] )
-
+ email_meta[self.gstorer.INT_DATE_K] )
+
#labels for this email => real_labels U extra_labels
labels = set(email_meta[self.gstorer.LABELS_K])
-
+
# add in the labels_to_create struct
for label in labels:
LOG.debug("label = %s\n" % (label))
- if label.lower() in reserved_labels_map.keys(): #exclude creation of migrated label
+ if label.lower() in list(reserved_labels_map.keys()): #exclude creation of migrated label
n_label = reserved_labels_map.get(label.lower(), "gmv-default-label")
LOG.info("Apply label '%s' instead of '%s' (lower or uppercase)"\
" because it is a Gmail reserved label." % (n_label, label))
label = n_label
labels_to_apply[str(label)] = imap_id #add in multimap
-
+
# get list of labels to create (do a union with labels to create)
- #labels_to_create.update([ label for label in labels if label not in existing_labels])
+ #labels_to_create.update([ label for label in labels if label not in existing_labels])
labels_to_create.update([ label for label in labels_to_apply.keys() \
- if label not in existing_labels])
+ if label not in existing_labels])
- for ex_label in extra_labels:
+ for ex_label in extra_labels:
labels_to_apply[ex_label] = imap_id
-
- except Exception, err:
+
+ except Exception as err:
handle_restore_imap_error(err, gm_id, db_gmail_ids_info, self)
#create the non existing labels and update existing labels
if len(labels_to_create) > 0:
LOG.debug("Labels creation tentative for chats ids %s." % (group_imap_ids))
existing_labels = self.src.create_gmail_labels(labels_to_create, existing_labels)
-
+
# associate labels with emails
LOG.critical("Applying labels to the current batch of chats.")
try:
LOG.debug("Changing directory. Going into ALLMAIL")
self.src.select_folder('ALLMAIL') #go to ALL MAIL to make STORE usable
for label in labels_to_apply.keys():
- self.src.apply_labels_to(labels_to_apply[label], [label])
- except Exception, err:
+ self.src.apply_labels_to(labels_to_apply[label], [label])
+ except Exception as err:
LOG.error("Problem when applying labels %s to the following ids: %s" %(label, labels_to_apply[label]), err)
if isinstance(err, imap_utils.LabelError) and err.ignore() == True:
LOG.critical("Ignore labelling: %s" % (err))
@@ -1031,25 +1032,25 @@ def restore_chats(self, extra_labels = [], restart = False): #pylint:disable=W01
finally:
self.src.select_folder(folder_def_location) # go back to an empty DIR (Drafts) to be fast
labels_to_apply = collections_utils.SetMultimap() #reset label to apply
-
+
nb_emails_restored += nb_items
-
+
#indicate every 10 messages the number of messages left to process
left_emails = (total_nb_emails_to_restore - nb_emails_restored)
-
- if (left_emails > 0):
+
+ if (left_emails > 0):
elapsed = timer.elapsed() #elapsed time in seconds
LOG.critical("\n== Processed %d chats in %s. %d left to be restored "\
"(time estimate %s).==\n" % \
(nb_emails_restored, timer.seconds_to_human_time(elapsed), \
left_emails, timer.estimate_time_left(nb_emails_restored, elapsed, left_emails)))
-
+
# save id every nb_items restored emails
# add the last treated gm_id
self.save_lastid(self.OP_CHAT_RESTORE, last_id)
-
- return self.error_report
-
+
+ return self.error_report
+
def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
"""
restore emails in a gmail account using batching to group restore
@@ -1059,22 +1060,22 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
The idea is to get a batch of 50 emails and push them all in the mailbox one by one and get the uid for each of them.
Then create a dict of labels => uid_list and for each label send a unique store command after having changed dir
"""
- LOG.critical("Restore emails in gmail account %s." % (self.login) )
-
+ LOG.critical("Restore emails in gmail account %s." % (self.login) )
+
LOG.critical("Read email info from %s gmvault-db." % (self.db_root_dir))
-
+
#get gmail_ids from db
db_gmail_ids_info = self.gstorer.get_all_existing_gmail_ids(pivot_dir)
-
- LOG.critical("Total number of elements to restore %s." % (len(db_gmail_ids_info.keys())))
-
+
+ LOG.critical("Total number of elements to restore %s." % (len(list(db_gmail_ids_info.keys()))))
+
if restart:
db_gmail_ids_info = self.get_gmails_ids_left_to_restore(self.OP_EMAIL_RESTORE, db_gmail_ids_info)
-
+
total_nb_emails_to_restore = len(db_gmail_ids_info)
-
+
LOG.critical("Got all emails id left to restore. Still %s emails to do.\n" % (total_nb_emails_to_restore) )
-
+
existing_labels = set() #set of existing labels to not call create_gmail_labels all the time
reserved_labels_map = gmvault_utils.get_conf_defaults().get_dict("Restore", "reserved_labels_map", { u'migrated' : u'gmv-migrated', u'\muted' : u'gmv-muted' })
nb_emails_restored = 0 #to count nb of emails restored
@@ -1082,42 +1083,42 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
#get all mail folder name
all_mail_name = self.src.get_folder_name("ALLMAIL")
-
+
# go to DRAFTS folder because if you are in ALL MAIL when uploading emails it is very slow
folder_def_location = gmvault_utils.get_conf_defaults().get("General", "restore_default_location", "DRAFTS")
self.src.select_folder(folder_def_location)
-
+
timer = gmvault_utils.Timer() # local timer for restore emails
timer.start()
-
- nb_items = gmvault_utils.get_conf_defaults().get_int("General", "nb_messages_per_restore_batch", 80)
-
- for group_imap_ids in itertools.izip_longest(fillvalue=None, *[iter(db_gmail_ids_info)]*nb_items):
-
+
+ nb_items = gmvault_utils.get_conf_defaults().get_int("General", "nb_messages_per_restore_batch", 80)
+
+        for group_imap_ids in itertools.zip_longest(fillvalue=None, *[iter(db_gmail_ids_info)]*nb_items):
+
last_id = group_imap_ids[-1] #will be used to save the last id
#remove all None elements from group_imap_ids
group_imap_ids = itertools.ifilter(lambda x: x != None, group_imap_ids)
-
+
labels_to_create = set(extra_labels) #create label set and add extra labels to apply to all emails
-
+
LOG.critical("Processing next batch of %s emails.\n" % (nb_items))
-
+
# unbury the metadata for all these emails
- for gm_id in group_imap_ids:
+ for gm_id in group_imap_ids:
try:
LOG.debug("Unbury email with gm_id %s." % (gm_id))
email_meta, email_data = self.gstorer.unbury_email(gm_id)
-
+
LOG.critical("Pushing email body with id %s." % (gm_id))
LOG.debug("Subject = %s." % (email_meta[self.gstorer.SUBJECT_K]))
-
+
# push data in gmail account and get uids
imap_id = self.src.push_data(all_mail_name, email_data, \
email_meta[self.gstorer.FLAGS_K] , \
- email_meta[self.gstorer.INT_DATE_K] )
-
+ email_meta[self.gstorer.INT_DATE_K] )
+
#labels for this email => real_labels U extra_labels
labels = set(email_meta[self.gstorer.LABELS_K])
@@ -1126,29 +1127,29 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
if label != "\\Starred":
#LOG.debug("label = %s\n" % (label.encode('utf-8')))
LOG.debug("label = %s\n" % (label))
- if label.lower() in reserved_labels_map.keys(): #exclude creation of migrated label
+ if label.lower() in list(reserved_labels_map.keys()): #exclude creation of migrated label
n_label = reserved_labels_map.get(label.lower(), "gmv-default-label")
LOG.info("Apply label '%s' instead of '%s' (lower or uppercase)"\
- " because it is a Gmail reserved label." % (n_label, label))
+ " because it is a Gmail reserved label." % (n_label, label))
label = n_label
labels_to_apply[label] = imap_id #add item in multimap
-
+
# get list of labels to create (do a union with labels to create)
- #labels_to_create.update([ label for label in labels if label not in existing_labels])
+ #labels_to_create.update([ label for label in labels if label not in existing_labels])
labels_to_create.update([ label for label in labels_to_apply.keys() \
- if label not in existing_labels])
+ if label not in existing_labels])
- for ex_label in extra_labels:
+ for ex_label in extra_labels:
labels_to_apply[ex_label] = imap_id
-
- except Exception, err:
+
+ except Exception as err:
handle_restore_imap_error(err, gm_id, db_gmail_ids_info, self)
#create the non existing labels and update existing labels
if len(labels_to_create) > 0:
LOG.debug("Labels creation tentative for emails with ids %s." % (group_imap_ids))
existing_labels = self.src.create_gmail_labels(labels_to_create, existing_labels)
-
+
# associate labels with emails
LOG.critical("Applying labels to the current batch of emails.")
try:
@@ -1158,8 +1159,8 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
self.src.select_folder('ALLMAIL') #go to ALL MAIL to make STORE usable
LOG.debug("Changed dir. Operation time = %s ms" % (the_timer.elapsed_ms()))
for label in labels_to_apply.keys():
- self.src.apply_labels_to(labels_to_apply[label], [label])
- except Exception, err:
+ self.src.apply_labels_to(labels_to_apply[label], [label])
+ except Exception as err:
LOG.error("Problem when applying labels %s to the following ids: %s" %(label, labels_to_apply[label]), err)
if isinstance(err, imap_utils.LabelError) and err.ignore() == True:
LOG.critical("Ignore labelling: %s" % (err))
@@ -1175,22 +1176,22 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
finally:
self.src.select_folder(folder_def_location) # go back to an empty DIR (Drafts) to be fast
labels_to_apply = collections_utils.SetMultimap() #reset label to apply
-
+
nb_emails_restored += nb_items
-
+
#indicate every 10 messages the number of messages left to process
left_emails = (total_nb_emails_to_restore - nb_emails_restored)
-
- if (left_emails > 0):
+
+ if (left_emails > 0):
elapsed = timer.elapsed() #elapsed time in seconds
LOG.critical("\n== Processed %d emails in %s. %d left to be restored "\
"(time estimate %s). ==\n" % \
(nb_emails_restored, timer.seconds_to_human_time(elapsed), \
left_emails, timer.estimate_time_left(nb_emails_restored, elapsed, left_emails)))
-
+
# save id every 50 restored emails
# add the last treated gm_id
self.save_lastid(self.OP_EMAIL_RESTORE, last_id)
-
- return self.error_report
-
+
+ return self.error_report
+
diff --git a/src/gmv/gmvault_db.py b/src/gmv/gmvault_db.py
index c06407a2..1f04c07a 100755
--- a/src/gmv/gmvault_db.py
+++ b/src/gmv/gmvault_db.py
@@ -16,6 +16,7 @@
along with this program. If not, see .
"""
+from __future__ import absolute_import
from contextlib import contextmanager
import json
import gzip
@@ -25,15 +26,16 @@
import fnmatch
import shutil
import codecs
-import StringIO
+import io
-import gmv.blowfish as blowfish
-import gmv.log_utils as log_utils
+from . import blowfish
+from . import log_utils
-import gmv.collections_utils as collections_utils
-import gmv.gmvault_utils as gmvault_utils
-import gmv.imap_utils as imap_utils
-import gmv.credential_utils as credential_utils
+from . import collections_utils
+from . import gmvault_utils
+from . import imap_utils
+from . import credential_utils
+import six
LOG = log_utils.LoggerFactory.get_logger('gmvault_db')
@@ -76,7 +78,7 @@ class GmailStorer(object): #pylint:disable=R0902,R0904,R0914
INFO_AREA = '.info' # contains metadata concerning the database
ENCRYPTION_KEY_FILENAME = '.storage_key.sec'
EMAIL_OWNER = '.owner_account.info'
- GMVAULTDB_VERSION = '.gmvault_db_version.info'
+ GMVAULTDB_VERSION = '.gmvault_db_version.info'
def __init__(self, a_storage_dir, encrypt_data=False):
"""
@@ -142,7 +144,7 @@ def _init_sub_chats_dir(self):
self._sub_chats_dir = self.SUB_CHAT_AREA % ("subchats-%s" % (self._sub_chats_inc))
gmvault_utils.makedirs("%s/%s" % (self._db_dir, self._sub_chats_dir))
- # treat when more than limit chats in max dir
+ # treat when more than limit chats in max dir
# treat when no dirs
# add limit as attribute limit_per_dir = 2000
else:
@@ -150,7 +152,7 @@ def _init_sub_chats_dir(self):
files = os.listdir("%s/%s" % (self._chats_dir, nb_to_dir[the_max]))
self._sub_chats_nb = len(files)/2
self._sub_chats_inc = the_max
- self._sub_chats_dir = self.SUB_CHAT_AREA % nb_to_dir[the_max]
+ self._sub_chats_dir = self.SUB_CHAT_AREA % nb_to_dir[the_max]
def get_sub_chats_dir(self):
"""
@@ -201,7 +203,7 @@ def get_db_owners(self):
cases, the db will be only linked to one email.
"""
fname = '%s/%s' % (self._info_dir, self.EMAIL_OWNER)
- if os.path.exists(fname):
+ if os.path.exists(fname):
with open(fname, 'r') as f:
list_of_owners = json.load(f)
return list_of_owners
@@ -211,7 +213,7 @@ def get_db_owners(self):
def get_info_dir(self):
"""
Return the info dir of gmvault-db
- """
+ """
return self._info_dir
def get_encryption_cipher(self):
@@ -263,15 +265,15 @@ def parse_header_fields(cls, header_fields):
encod = "not found"
try:
encod = gmvault_utils.guess_encoding(tempo, use_encoding_list = False)
- u_tempo = unicode(tempo, encoding = encod)
- except gmvault_utils.GuessEncoding, enc_err:
+ u_tempo = six.text_type(tempo, encoding = encod)
+ except gmvault_utils.GuessEncoding as enc_err:
#it is already in unicode so ignore encoding
u_tempo = tempo
- except Exception, e:
+ except Exception as e:
LOG.critical(e)
LOG.critical("Warning: Guessed encoding = (%s). Ignore those characters" % (encod))
#try utf-8
- u_tempo = unicode(tempo, encoding="utf-8", errors='replace')
+ u_tempo = six.text_type(tempo, encoding="utf-8", errors='replace')
if u_tempo:
subject = u_tempo.encode('utf-8')
@@ -290,7 +292,7 @@ def parse_header_fields(cls, header_fields):
def get_all_chats_gmail_ids(self):
"""
- Get only chats dirs
+ Get only chats dirs
"""
# first create a normal dir and sort it below with an OrderedDict
# beware orderedDict preserve order by insertion and not by key order
@@ -303,19 +305,19 @@ def get_all_chats_gmail_ids(self):
#get all ids
for filepath in the_iter:
directory, fname = os.path.split(filepath)
- gmail_ids[long(os.path.splitext(fname)[0])] = os.path.basename(directory)
+ gmail_ids[int(os.path.splitext(fname)[0])] = os.path.basename(directory)
- #sort by key
+ #sort by key
#used own orderedDict to be compliant with version 2.5
gmail_ids = collections_utils.OrderedDict(
- sorted(gmail_ids.items(), key=lambda t: t[0]))
+ sorted(list(gmail_ids.items()), key=lambda t: t[0]))
return gmail_ids
def get_all_existing_gmail_ids(self, pivot_dir=None,
ignore_sub_dir=('chats',)):
"""
- get all existing gmail_ids from the database within the passed month
+ get all existing gmail_ids from the database within the passed month
and all posterior months
"""
# first create a normal dir and sort it below with an OrderedDict
@@ -343,11 +345,11 @@ def get_all_existing_gmail_ids(self, pivot_dir=None,
#get all ids
for filepath in the_iter:
directory, fname = os.path.split(filepath)
- gmail_ids[long(os.path.splitext(fname)[0])] = os.path.basename(directory)
+ gmail_ids[int(os.path.splitext(fname)[0])] = os.path.basename(directory)
- #sort by key
+ #sort by key
#used own orderedDict to be compliant with version 2.5
- gmail_ids = collections_utils.OrderedDict(sorted(gmail_ids.items(),
+ gmail_ids = collections_utils.OrderedDict(sorted(list(gmail_ids.items()),
key=lambda t: t[0]))
return gmail_ids
@@ -384,10 +386,10 @@ def bury_metadata(self, email_info, local_dir=None, extra_labels=()):
# come from imap_lib when label is a number
labels = []
for label in email_info[imap_utils.GIMAPFetcher.GMAIL_LABELS]:
- if isinstance(label, (int, long, float, complex)):
+ if isinstance(label, (int, float, complex)):
label = str(label)
- labels.append(unicode(gmvault_utils.remove_consecutive_spaces_and_strip(label)))
+ labels.append(six.text_type(gmvault_utils.remove_consecutive_spaces_and_strip(label)))
labels.extend(extra_labels) #add extra labels
@@ -444,7 +446,7 @@ def bury_email(self, email_info, local_dir=None, compress=False,
#if compress:
# data_path = '%s.gz' % data_path
- # data_desc = StringIO.StringIO()
+ # data_desc = io.BytesIO()
#else:
# data_desc = open(data_path, 'wb')
@@ -480,7 +482,7 @@ def bury_email(self, email_info, local_dir=None, compress=False,
else:
#convert email content to unicode
data = gmvault_utils.convert_to_unicode(email_info[imap_utils.GIMAPFetcher.EMAIL_BODY])
-
+
# write in chunks of one 1 MB
for chunk in gmvault_utils.chunker(data, 1048576):
data_desc.write(chunk.encode('utf-8'))
@@ -575,7 +577,7 @@ def quarantine_email(self, a_id):
q_meta_path = os.path.join(self._quarantine_dir, os.path.basename(meta))
if os.path.exists(q_data_path):
- os.remove(q_data_path)
+ os.remove(q_data_path)
if os.path.exists(q_meta_path):
os.remove(q_meta_path)
@@ -635,7 +637,7 @@ def unbury_data(self, a_id, a_id_dir=None):
else:
data = f.read()
- return data
+ return data
def unbury_metadata(self, a_id, a_id_dir=None):
"""
@@ -649,16 +651,16 @@ def unbury_metadata(self, a_id, a_id_dir=None):
metadata[self.INT_DATE_K] = gmvault_utils.e2datetime(
metadata[self.INT_DATE_K])
-
+
# force conversion of labels as string because IMAPClient
# returns a num when the label is a number (ie. '00000') and handle utf-8
new_labels = []
for label in metadata[self.LABELS_K]:
- if isinstance(label, (int, long, float, complex)):
+ if isinstance(label, (int, float, complex)):
label = str(label)
- new_labels.append(unicode(label))
-
+ new_labels.append(six.text_type(label))
+
metadata[self.LABELS_K] = new_labels
return metadata
@@ -701,8 +703,8 @@ def delete_emails(self, emails_info, msg_type):
elif os.path.exists(comp_data_p):
os.rename(comp_data_p, '%s.gz' % bin_p)
elif os.path.exists(cryp_comp_data_p):
- os.rename(cryp_comp_data_p, '%s.crypt.gz' % bin_p)
-
+ os.rename(cryp_comp_data_p, '%s.crypt.gz' % bin_p)
+
if os.path.exists(metadata_p):
os.rename(metadata_p, metadata_bin_p)
else:
@@ -712,7 +714,7 @@ def delete_emails(self, emails_info, msg_type):
elif os.path.exists(comp_data_p):
os.remove(comp_data_p)
elif os.path.exists(cryp_comp_data_p):
- os.remove(cryp_comp_data_p)
+ os.remove(cryp_comp_data_p)
if os.path.exists(metadata_p):
os.remove(metadata_p)
diff --git a/src/gmv/gmvault_export.py b/src/gmv/gmvault_export.py
index 488387aa..cf76d93f 100644
--- a/src/gmv/gmvault_export.py
+++ b/src/gmv/gmvault_export.py
@@ -14,10 +14,11 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see .
+
+ Export module of Gmvault created by dave@vasilevsky.ca
'''
-'''
- Export function of Gmvault created by dave@vasilevsky.ca
-'''
+
+from __future__ import absolute_import
import os
import re
@@ -25,10 +26,10 @@
import imapclient.imap_utf7 as imap_utf7
-import gmv.imap_utils as imap_utils
-import gmv.log_utils as log_utils
-import gmv.gmvault_utils as gmvault_utils
-import gmv.gmvault_db as gmvault_db
+from . import imap_utils
+from . import log_utils
+from . import gmvault_utils
+from . import gmvault_db
LOG = log_utils.LoggerFactory.get_logger('gmvault_export')
diff --git a/src/gmv/gmvault_utils.py b/src/gmv/gmvault_utils.py
index 9503f98a..f4ad8f32 100755
--- a/src/gmv/gmvault_utils.py
+++ b/src/gmv/gmvault_utils.py
@@ -17,6 +17,8 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import, print_function
+
import os
import re
@@ -26,17 +28,20 @@
import fnmatch
import functools
-import StringIO
+import io
import sys
import traceback
-import random
+import random
import locale
import urllib
import chardet
-import gmv.log_utils as log_utils
-import gmv.conf.conf_helper
-import gmv.gmvault_const as gmvault_const
+from . import gmvault_const
+from . import log_utils
+from .conf import conf_helper
+
+import six
+from six.moves import range
LOG = log_utils.LoggerFactory.get_logger('gmvault_utils')
@@ -67,7 +72,7 @@ def __repr__(self):
def __get__(self, obj, objtype):
"""Support instance methods."""
return functools.partial(self.__call__, obj)
-
+
class Curry:
""" Class used to implement the currification (functional programming technic) :
Create a function from another one by instanciating some of its parameters.
@@ -77,7 +82,7 @@ def __init__(self, fun, *args, **kwargs):
self.fun = fun
self.pending = args[:]
self.kwargs = kwargs.copy()
-
+
def __call__(self, *args, **kwargs):
if kwargs and self.kwargs:
the_kw = self.kwargs.copy()
@@ -91,33 +96,33 @@ def __call__(self, *args, **kwargs):
LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
DIGITS = '0123456789'
-def make_password(minlength=8, maxlength=16):
+def make_password(minlength=8, maxlength=16):
"""
generate randomw password
"""
length = random.randint(minlength, maxlength)
- letters = LETTERS + DIGITS
- return ''.join([random.choice(letters) for _ in range(length)])
+ letters = LETTERS + DIGITS
+ return ''.join([random.choice(letters) for _ in range(length)])
def get_exception_traceback():
"""
return the exception traceback (stack info and so on) in a string
-
+
Args:
None
-
+
Returns:
return a string that contains the exception traceback
-
+
Raises:
-
+
"""
-
- the_file = StringIO.StringIO()
+
+ the_file = io.StringIO()
exception_type, exception_value, exception_traceback = sys.exc_info() #IGNORE:W0702
- traceback.print_exception(exception_type, exception_value, exception_traceback, file = the_file)
+ traceback.print_exception(exception_type, exception_value, exception_traceback, file=the_file)
return the_file.getvalue()
@@ -140,44 +145,44 @@ class Timer(object):
Timer Class to mesure time.
Possess also few time utilities
"""
-
-
+
+
def __init__(self):
-
+
self._start = None
-
+
def start(self):
"""
start the timer
"""
self._start = time.time()
-
+
def reset(self):
"""
reset the timer to 0
"""
self._start = time.time()
-
+
def elapsed(self):
"""
return elapsed time in sec
"""
now = time.time()
-
+
return int(round(now - self._start))
-
+
def elapsed_ms(self):
"""
return elapsed time up to micro second
"""
return time.time() - self._start
-
+
def elapsed_human_time(self, suffixes=TIMER_SUFFIXES, add_s=False, separator=' '):#pylint:disable=W0102
"""
Takes an amount of seconds and turns it into a human-readable amount of time.
"""
seconds = self.elapsed()
-
+
return self.seconds_to_human_time(seconds, suffixes, add_s, separator)
@classmethod
@@ -189,7 +194,7 @@ def estimate_time_left(cls, nb_elem_done, in_sec, still_to_be_done, in_human_tim
return cls.seconds_to_human_time(int(round(float(still_to_be_done * in_sec)/nb_elem_done)))
else:
return int(round(float(still_to_be_done * in_sec)/nb_elem_done))
-
+
@classmethod
def seconds_to_human_time(cls, seconds, suffixes=TIMER_SUFFIXES, add_s=False, separator=' '):#pylint:disable=W0102
"""
@@ -197,7 +202,7 @@ def seconds_to_human_time(cls, seconds, suffixes=TIMER_SUFFIXES, add_s=False, se
"""
# the formatted time string to be returned
the_time = []
-
+
# the pieces of time to iterate over (days, hours, minutes, etc)
# - the first piece in each tuple is the suffix (d, h, w)
# - the second piece is the length in seconds (a day is 60s * 60m * 24h)
@@ -207,10 +212,10 @@ def seconds_to_human_time(cls, seconds, suffixes=TIMER_SUFFIXES, add_s=False, se
(suffixes[3], 60 * 60),
(suffixes[4], 60),
(suffixes[5], 1)]
-
+
if seconds < 1: #less than a second case
return "less than a second"
-
+
# for each time piece, grab the value and remaining seconds, and add it to
# the time string
for suffix, length in parts:
@@ -221,27 +226,27 @@ def seconds_to_human_time(cls, seconds, suffixes=TIMER_SUFFIXES, add_s=False, se
(suffix, (suffix, suffix + 's')[value > 1])[add_s]))
if seconds < 1:
break
-
+
return separator.join(the_time)
-ZERO = datetime.timedelta(0)
-# A UTC class.
-class UTC(datetime.tzinfo):
- """UTC Timezone"""
-
+ZERO = datetime.timedelta(0)
+# A UTC class.
+class UTC(datetime.tzinfo):
+ """UTC Timezone"""
+
def utcoffset(self, a_dt): #pylint: disable=W0613
- ''' return utcoffset '''
- return ZERO
-
+ ''' return utcoffset '''
+ return ZERO
+
def tzname(self, a_dt): #pylint: disable=W0613
- ''' return tzname '''
- return "UTC"
-
- def dst(self, a_dt): #pylint: disable=W0613
- ''' return dst '''
- return ZERO
-
-# pylint: enable-msg=W0613
+ ''' return tzname '''
+ return "UTC"
+
+ def dst(self, a_dt): #pylint: disable=W0613
+ ''' return dst '''
+ return ZERO
+
+# pylint: enable-msg=W0613
UTC_TZ = UTC()
def get_ym_from_datetime(a_datetime):
@@ -250,7 +255,7 @@ def get_ym_from_datetime(a_datetime):
"""
if a_datetime:
return a_datetime.strftime('%Y-%m')
-
+
return None
MONTH_CONV = { 1: 'Jan', 4: 'Apr', 6: 'Jun', 7: 'Jul', 10: 'Oct' , 12: 'Dec',
@@ -272,34 +277,34 @@ def compare_yymm_dir(first, second):
0 if equal
-1 if second > first
"""
-
+
matched = MONTH_YEAR_RE.match(first)
-
+
if matched:
first_year = int(matched.group('year'))
first_month = int(matched.group('month'))
-
+
first_val = (first_year * 1000) + first_month
else:
raise Exception("Invalid Year-Month expression (%s). Please correct it to be yyyy-mm" % (first))
-
+
matched = MONTH_YEAR_RE.match(second)
-
+
if matched:
second_year = int(matched.group('year'))
second_month = int(matched.group('month'))
-
+
second_val = (second_year * 1000) + second_month
else:
raise Exception("Invalid Year-Month expression (%s). Please correct it" % (second))
-
+
if first_val > second_val:
return 1
elif first_val == second_val:
return 0
else:
return -1
-
+
def cmp_to_key(mycmp):
"""
Taken from functools. Not in all python versions so had to redefine it
@@ -324,13 +329,13 @@ def __ne__(self, other):
def __hash__(self):
raise TypeError('hash not implemented')
return Key
-
+
def get_all_dirs_posterior_to(a_dir, dirs):
"""
get all directories posterior
"""
#sort the passed dirs list and return all dirs posterior to a_dir
-
+
return [ name for name in sorted(dirs, key=cmp_to_key(compare_yymm_dir))\
if compare_yymm_dir(a_dir, name) <= 0 ]
@@ -339,7 +344,7 @@ def get_all_dirs_under(root_dir, ignored_dirs = []):#pylint:disable=W0102
Get all directory names under (1 level only) the root dir
params:
root_dir : the dir to look under
- ignored_dir: ignore the dir if it is in this list of dirnames
+ ignored_dir: ignore the dir if it is in this list of dirnames
"""
return [ name for name in os.listdir(root_dir) \
if ( os.path.isdir(os.path.join(root_dir, name)) \
@@ -350,13 +355,13 @@ def datetime2imapdate(a_datetime):
Transfrom in date format for IMAP Request
"""
if a_datetime:
-
+
month = MONTH_CONV[a_datetime.month]
-
- pattern = '%%d-%s-%%Y' % (month)
-
+
+ pattern = '%%d-%s-%%Y' % (month)
+
return a_datetime.strftime(pattern)
-
+
def e2datetime(a_epoch):
"""
@@ -396,7 +401,7 @@ def contains_any(string, char_set):
def makedirs(a_path):
""" my own version of makedir """
-
+
if os.path.isdir(a_path):
# it already exists so return
return
@@ -410,15 +415,16 @@ def __rmgeneric(path, __func__):
try:
__func__(path)
#print 'Removed ', path
- except OSError, (_, strerror): #IGNORE:W0612
- print """Error removing %(path)s, %(error)s """ % {'path' : path, 'error': strerror }
-
+ except OSError as xxx_todo_changeme: #IGNORE:W0612
+ (_, strerror) = xxx_todo_changeme.args #IGNORE:W0612
+ print("""Error removing %(path)s, %(error)s """ % {'path' : path, 'error': strerror })
+
def delete_all_under(path, delete_top_dir = False):
""" delete all files and directories under path """
if not os.path.isdir(path):
return
-
+
files = os.listdir(path)
for the_f in files:
@@ -430,7 +436,7 @@ def delete_all_under(path, delete_top_dir = False):
delete_all_under(fullpath)
new_f = os.rmdir
__rmgeneric(fullpath, new_f)
-
+
if delete_top_dir:
os.rmdir(path)
@@ -456,7 +462,7 @@ def ordered_dirwalk(a_dir, a_file_wildcards='*', a_dir_ignore_list=(), sort_func
for sub_dir in sort_func(sub_dirs):
if os.path.basename(sub_dir) not in a_dir_ignore_list:
for p_elem in ordered_dirwalk(sub_dir, a_file_wildcards):
- yield p_elem
+ yield p_elem
else:
LOG.debug("Ignore subdir %s" % sub_dir)
@@ -467,7 +473,7 @@ def dirwalk(a_dir, a_wildcards='*'):
for root, _, files in os.walk(a_dir):
for the_file in files:
if fnmatch.fnmatch(the_file, a_wildcards):
- yield os.path.join(root, the_file)
+ yield os.path.join(root, the_file)
def ascii_hex(a_str):
"""
@@ -502,14 +508,14 @@ def guess_encoding(byte_str, use_encoding_list=True):
"""
encoding = None
- if type(byte_str) == type(unicode()):
+ if type(byte_str) == type(six.text_type()):
raise GuessEncoding("Error. The passed string is a unicode string and not a byte string")
if use_encoding_list:
encoding_list = get_conf_defaults().get('Localisation', 'encoding_guess_list', DEFAULT_ENC_LIST)
for enc in encoding_list:
try:
- unicode(byte_str ,enc,"strict")
+ six.text_type(byte_str ,enc,"strict")
encoding = enc
except:
pass
@@ -548,12 +554,12 @@ def convert_to_unicode(a_str):
encoding = guess_encoding(a_str[:20000], use_encoding_list = False)
LOG.debug("Convert to %s" % (encoding))
- u_str = unicode(a_str, encoding = encoding) #convert to unicode with given encoding
- except Exception, e:
+ u_str = six.text_type(a_str, encoding = encoding) #convert to unicode with given encoding
+ except Exception as e:
LOG.debug("Exception: %s" % (e))
LOG.info("Warning: Guessed encoding = (%s). Ignore those characters" % (encoding if encoding else "Not defined"))
#try utf-8
- u_str = unicode(a_str, encoding="utf-8", errors='replace')
+ u_str = six.text_type(a_str, encoding="utf-8", errors='replace')
return u_str
@@ -562,7 +568,7 @@ def convert_argv_to_unicode(a_str):
Convert command line individual arguments (argv to unicode)
"""
#if str is already unicode do nothing and return the str
- if type(a_str) == type(unicode()):
+ if type(a_str) == type(six.text_type()):
return a_str
#encoding can be forced from conf
@@ -579,19 +585,19 @@ def convert_argv_to_unicode(a_str):
LOG.debug("Cannot Terminal encoding using locale.getpreferredencoding() and locale.getdefaultlocale(), loc = %s. Use chardet to try guessing the encoding." % (loc if loc else "None"))
terminal_encoding = guess_encoding(a_str)
else:
- LOG.debug("Use terminal encoding forced from the configuration file.")
+ LOG.debug("Use terminal encoding forced from the configuration file.")
try:
LOG.debug("terminal encoding = %s." % (terminal_encoding))
#decode byte string to unicode and fails in case of error
u_str = a_str.decode(terminal_encoding)
LOG.debug("unicode_escape val = %s." % (u_str.encode('unicode_escape')))
LOG.debug("raw unicode = %s." % (u_str))
- except Exception, err:
+ except Exception as err:
LOG.error(err)
get_exception_traceback()
LOG.info("Convertion of %s from %s to a unicode failed. Will now convert to unicode using utf-8 encoding and ignoring errors (non utf-8 characters will be eaten)." % (a_str, terminal_encoding))
LOG.info("Please set properly the Terminal encoding or use the [Localisation]:terminal_encoding property to set it.")
- u_str = unicode(a_str, encoding='utf-8', errors='ignore')
+ u_str = six.text_type(a_str, encoding='utf-8', errors='ignore')
return u_str
@@ -601,16 +607,16 @@ def get_home_dir_path():
Get the gmvault dir
"""
gmvault_dir = os.getenv("GMVAULT_DIR", None)
-
+
# check by default in user[HOME]
if not gmvault_dir:
LOG.debug("no ENV variable $GMVAULT_DIR defined. Set by default $GMVAULT_DIR to $HOME/.gmvault (%s/.gmvault)" \
% (os.getenv("HOME",".")))
gmvault_dir = "%s/.gmvault" % (os.getenv("HOME", "."))
-
+
#create dir if not there
makedirs(gmvault_dir)
-
+
return gmvault_dir
CONF_FILE = "gmvault_defaults.conf"
@@ -622,19 +628,19 @@ def get_conf_defaults():
Beware it is memoized
"""
filepath = get_conf_filepath()
-
+
if filepath:
-
- os.environ[gmv.conf.conf_helper.Conf.ENVNAME] = filepath
-
- the_cf = gmv.conf.conf_helper.Conf.get_instance()
-
+
+ os.environ[conf_helper.Conf.ENVNAME] = filepath
+
+ the_cf = conf_helper.Conf.get_instance()
+
LOG.debug("Load defaults from %s" % (filepath))
-
+
return the_cf
else:
- return gmv.conf.conf_helper.MockConf() #retrun MockObject that will play defaults
-
+ return conf_helper.MockConf() #return MockObject that will supply defaults
+
#VERSION DETECTION PATTERN
VERSION_PATTERN = r'\s*conf_version=\s*(?P\S*)\s*'
VERSION_RE = re.compile(VERSION_PATTERN)
@@ -655,7 +661,7 @@ def _get_version_from_conf(home_conf_file):
if matched:
ver = matched.group('version')
return ver.strip()
-
+
return ver
def _create_default_conf_file(home_conf_file):
@@ -667,7 +673,7 @@ def _create_default_conf_file(home_conf_file):
with open(home_conf_file, "w+") as f:
f.write(gmvault_const.DEFAULT_CONF_FILE)
return home_conf_file
- except Exception, err:
+ except Exception as err:
#catch all error and let run gmvault with defaults if needed
LOG.critical("Ignore Error when trying to create conf file for defaults in %s:\n%s.\n" % (get_home_dir_path(), err))
LOG.debug("=== Exception traceback ===")
@@ -682,7 +688,7 @@ def get_conf_filepath():
If it cannot be created, then return None
"""
home_conf_file = "%s/%s" % (get_home_dir_path(), CONF_FILE)
-
+
if not os.path.exists(home_conf_file):
return _create_default_conf_file(home_conf_file)
else:
@@ -691,14 +697,14 @@ def get_conf_filepath():
if version not in VERSIONS_TO_PRESERVE:
LOG.debug("%s with version %s is too old, overwrite it with the latest file." \
% (home_conf_file, version))
- return _create_default_conf_file(home_conf_file)
-
+ return _create_default_conf_file(home_conf_file)
+
return home_conf_file
def chunker(seq, size):
"""Returns the contents of `seq` in chunks of up to `size` items."""
- return (seq[pos:pos + size] for pos in xrange(0, len(seq), size))
+ return (seq[pos:pos + size] for pos in range(0, len(seq), size))
def escape_url(text):
@@ -725,6 +731,6 @@ def format_url_params(params):
:return: A URL query string version of the given dict.
"""
param_elements = []
- for param in sorted(params.iteritems(), key=lambda x: x[0]):
+ for param in sorted(six.iteritems(params), key=lambda x: x[0]):
param_elements.append('%s=%s' % (param[0], escape_url(param[1])))
return '&'.join(param_elements)
diff --git a/src/gmv/imap_utils.py b/src/gmv/imap_utils.py
index f30be16c..8d68b2c4 100755
--- a/src/gmv/imap_utils.py
+++ b/src/gmv/imap_utils.py
@@ -19,22 +19,25 @@
Module containing the IMAPFetcher object which is the Wrapper around the modified IMAPClient object
'''
+from __future__ import absolute_import
+
+import functools
import math
import time
import socket
import re
-import functools
-
import ssl
import imaplib
-import gmv.gmvault_const as gmvault_const
-import gmv.log_utils as log_utils
-import gmv.credential_utils as credential_utils
+from . import gmvault_const
+from . import log_utils
+from . import credential_utils
+from . import gmvault_utils
+from . import mod_imap as mimap
-import gmv.gmvault_utils as gmvault_utils
-import gmv.mod_imap as mimap
+import six
+from six.moves import map, range
LOG = log_utils.LoggerFactory.get_logger('imap_utils')
@@ -48,7 +51,7 @@ def __init__(self, a_msg, quarantined = False):
"""
super(PushEmailError, self).__init__(a_msg)
self._in_quarantine = quarantined
-
+
def quarantined(self):
""" Get email to quarantine """
return self._in_quarantine
@@ -63,7 +66,7 @@ def __init__(self, a_msg, ignore = False):
"""
super(LabelError, self).__init__(a_msg)
self._ignore = ignore
-
+
def ignore(self):
""" ignore """
return self._ignore
@@ -86,43 +89,43 @@ def retry(a_nb_tries=3, a_sleep_time=1, a_backoff=1): #pylint:disable=R0912
if a_sleep_time <= 0:
raise ValueError("a_sleep_time must be greater than 0")
-
+
def reconnect(the_self, rec_nb_tries, total_nb_tries, rec_error, rec_sleep_time = [1]): #pylint: disable=W0102
"""
Reconnect procedure. Sleep and try to reconnect
"""
# go in retry mode if less than a_nb_tries
while rec_nb_tries[0] < total_nb_tries:
-
+
LOG.critical("Disconnecting from Gmail Server and sleeping ...")
- the_self.disconnect()
-
+ the_self.disconnect()
+
# add X sec of wait
time.sleep(rec_sleep_time[0])
rec_sleep_time[0] *= a_backoff #increase sleep time for next time
-
+
rec_nb_tries[0] += 1
-
+
#increase total nb of reconns
the_self.total_nb_reconns += 1
-
+
# go in retry mode: reconnect.
# retry reconnect as long as we have tries left
try:
LOG.critical("Reconnecting to the from Gmail Server.")
-
+
#reconnect to the current folder
the_self.connect(go_to_current_folder = True )
-
- return
-
- except Exception, ignored:
+
+ return
+
+ except Exception as ignored:
# catch all errors and try as long as we have tries left
LOG.exception(ignored)
else:
#cascade error
raise rec_error
-
+
def inner_retry(the_func): #pylint:disable=C0111,R0912
def wrapper(*args, **kwargs): #pylint:disable=C0111,R0912
nb_tries = [0] # make it mutable in reconnect
@@ -130,60 +133,60 @@ def wrapper(*args, **kwargs): #pylint:disable=C0111,R0912
while True:
try:
return the_func(*args, **kwargs)
- except PushEmailError, p_err:
-
+ except PushEmailError as p_err:
+
LOG.debug("error message = %s. traceback:%s" % (p_err, gmvault_utils.get_exception_traceback()))
-
+
if nb_tries[0] < a_nb_tries:
LOG.critical("Cannot reach the Gmail server. Wait %s second(s) and retrying." % (m_sleep_time[0]))
else:
LOG.critical("Stop retrying, tried too many times ...")
-
+
reconnect(args[0], nb_tries, a_nb_tries, p_err, m_sleep_time)
-
- except imaplib.IMAP4.abort, err: #abort is recoverable and error is not
-
+
+ except imaplib.IMAP4.abort as err: #abort is recoverable and error is not
+
LOG.debug("IMAP (abort) error message = %s. traceback:%s" % (err, gmvault_utils.get_exception_traceback()))
-
+
if nb_tries[0] < a_nb_tries:
LOG.critical("Received an IMAP abort error. Wait %s second(s) and retrying." % (m_sleep_time[0]))
else:
LOG.critical("Stop retrying, tried too many times ...")
-
+
# problem with this email, put it in quarantine
- reconnect(args[0], nb_tries, a_nb_tries, err, m_sleep_time)
-
- except socket.error, sock_err:
+ reconnect(args[0], nb_tries, a_nb_tries, err, m_sleep_time)
+
+ except socket.error as sock_err:
LOG.debug("error message = %s. traceback:%s" % (sock_err, gmvault_utils.get_exception_traceback()))
-
+
if nb_tries[0] < a_nb_tries:
LOG.critical("Cannot reach the Gmail server. Wait %s second(s) and retrying." % (m_sleep_time[0]))
else:
LOG.critical("Stop retrying, tried too many times ...")
-
+
reconnect(args[0], nb_tries, a_nb_tries, sock_err, m_sleep_time)
-
- except ssl.SSLError, ssl_err:
+
+ except ssl.SSLError as ssl_err:
LOG.debug("error message = %s. traceback:%s" % (ssl_err, gmvault_utils.get_exception_traceback()))
-
+
if nb_tries[0] < a_nb_tries:
LOG.critical("Cannot reach the Gmail server. Wait %s second(s) and retrying." % (m_sleep_time[0]))
else:
LOG.critical("Stop retrying, tried too many times ...")
-
+
reconnect(args[0], nb_tries, a_nb_tries, sock_err, m_sleep_time)
-
- except imaplib.IMAP4.error, err:
-
+
+ except imaplib.IMAP4.error as err:
+
#just trace it back for the moment
LOG.debug("IMAP (normal) error message = %s. traceback:%s" % (err, gmvault_utils.get_exception_traceback()))
-
+
if nb_tries[0] < a_nb_tries:
LOG.critical("Error when reaching Gmail server. Wait %s second(s) and retry up to 2 times." \
% (m_sleep_time[0]))
else:
LOG.critical("Stop retrying, tried too many times ...")
-
+
#raise err
# retry 2 times before to quit
reconnect(args[0], nb_tries, 2, err, m_sleep_time)
@@ -198,51 +201,51 @@ class GIMAPFetcher(object): #pylint:disable=R0902,R0904
'''
GMAIL_EXTENSION = 'X-GM-EXT-1' # GMAIL capability
GMAIL_ALL = u'[Gmail]/All Mail' #GMAIL All Mail mailbox
-
+
GENERIC_GMAIL_ALL = u'\\AllMail' # unlocalised GMAIL ALL
GENERIC_DRAFTS = u'\\Drafts' # unlocalised DRAFTS
GENERIC_GMAIL_CHATS = gmvault_const.GMAIL_UNLOCAL_CHATS # unlocalised Chats names
-
+
FOLDER_NAMES = ['ALLMAIL', 'CHATS', 'DRAFTS']
-
+
GMAIL_ID = 'X-GM-MSGID' #GMAIL ID attribute
GMAIL_THREAD_ID = 'X-GM-THRID'
GMAIL_LABELS = 'X-GM-LABELS'
-
+
IMAP_INTERNALDATE = 'INTERNALDATE'
IMAP_FLAGS = 'FLAGS'
IMAP_ALL = {'type':'imap', 'req':'ALL'}
-
+
EMAIL_BODY = 'BODY[]'
-
+
GMAIL_SPECIAL_DIRS = ['\\Inbox', '\\Starred', '\\Sent', '\\Draft', '\\Important']
-
+
#GMAIL_SPECIAL_DIRS_LOWER = ['\\inbox', '\\starred', '\\sent', '\\draft', '\\important']
GMAIL_SPECIAL_DIRS_LOWER = ['\\inbox', '\\starred', '\\sent', '\\draft', '\\important', '\\trash']
-
+
IMAP_BODY_PEEK = 'BODY.PEEK[]' #get body without setting msg as seen
#get the body info without setting msg as seen
- IMAP_HEADER_PEEK_FIELDS = 'BODY.PEEK[HEADER.FIELDS (MESSAGE-ID SUBJECT X-GMAIL-RECEIVED)]'
+ IMAP_HEADER_PEEK_FIELDS = 'BODY.PEEK[HEADER.FIELDS (MESSAGE-ID SUBJECT X-GMAIL-RECEIVED)]'
#key used to find these fields in the IMAP Response
IMAP_HEADER_FIELDS_KEY = 'BODY[HEADER.FIELDS (MESSAGE-ID SUBJECT X-GMAIL-RECEIVED)]'
-
+
#GET_IM_UID_RE
APPENDUID = r'^[APPENDUID [0-9]* ([0-9]*)] \(Success\)$'
-
+
APPENDUID_RE = re.compile(APPENDUID)
-
+
GET_ALL_INFO = [ GMAIL_ID, GMAIL_THREAD_ID, GMAIL_LABELS, IMAP_INTERNALDATE, \
IMAP_BODY_PEEK, IMAP_FLAGS, IMAP_HEADER_PEEK_FIELDS]
GET_ALL_BUT_DATA = [ GMAIL_ID, GMAIL_THREAD_ID, GMAIL_LABELS, IMAP_INTERNALDATE, \
IMAP_FLAGS, IMAP_HEADER_PEEK_FIELDS]
-
+
GET_DATA_ONLY = [ GMAIL_ID, IMAP_BODY_PEEK]
-
+
GET_GMAIL_ID = [ GMAIL_ID ]
-
+
GET_GMAIL_ID_DATE = [ GMAIL_ID, IMAP_INTERNALDATE]
def __init__(self, host, port, login, credential, readonly_folder = True): #pylint:disable=R0913
@@ -257,23 +260,23 @@ def __init__(self, host, port, login, credential, readonly_folder = True): #pyli
self.ssl = True
self.use_uid = True
self.readonly_folder = readonly_folder
-
- self.localized_folders = { 'ALLMAIL': { 'loc_dir' : None, 'friendly_name' : 'allmail'},
- 'CHATS' : { 'loc_dir' : None, 'friendly_name' : 'chats'},
+
+ self.localized_folders = { 'ALLMAIL': { 'loc_dir' : None, 'friendly_name' : 'allmail'},
+ 'CHATS' : { 'loc_dir' : None, 'friendly_name' : 'chats'},
'DRAFTS' : { 'loc_dir' : None, 'friendly_name' : 'drafts'} }
-
+
# memoize the current folder (All Mail or Chats) for reconnection management
self.current_folder = None
-
+
self.server = None
self.go_to_all_folder = True
self.total_nb_reconns = 0
# True when CHATS or other folder error msg has been already printed
self.printed_folder_error_msg = { 'ALLMAIL' : False, 'CHATS': False , 'DRAFTS':False }
-
+
#update GENERIC_GMAIL_CHATS. Should be done at the class level
self.GENERIC_GMAIL_CHATS.extend(gmvault_utils.get_conf_defaults().get_list('Localisation', 'chat_folder', []))
-
+
def spawn_connection(self):
"""
spawn a connection with the same parameters
@@ -281,7 +284,7 @@ def spawn_connection(self):
conn = GIMAPFetcher(self.host, self.port, self.login, self.credential, self.readonly_folder)
conn.connect()
return conn
-
+
def connect(self, go_to_current_folder = False):
"""
connect to the IMAP server
@@ -302,26 +305,26 @@ def connect(self, go_to_current_folder = False):
else:
raise Exception("Unknown authentication method %s. Please use xoauth or passwd authentication " \
% (self.credential['type']))
-
+
#set connected to True to handle reconnection in case of failure
self.once_connected = True
-
+
# check gmailness
self.check_gmailness()
-
+
# find allmail chats and drafts folders
self.find_folder_names()
if go_to_current_folder and self.current_folder:
self.server.select_folder(self.current_folder, readonly = self.readonly_folder)
-
+
#enable compression
if gmvault_utils.get_conf_defaults().get_boolean('General', 'enable_imap_compression', True):
self.enable_compression()
LOG.debug("After Enabling compression.")
else:
- LOG.debug("Do not enable imap compression.")
-
+ LOG.debug("Do not enable imap compression.")
+
def disconnect(self):
"""
disconnect to avoid too many simultaneous connection problem
@@ -329,18 +332,18 @@ def disconnect(self):
if self.server:
try:
self.server.logout()
- except Exception, ignored: #ignored exception but still log it in log file if activated
+ except Exception as ignored: #ignored exception but still log it in log file if activated
LOG.exception(ignored)
-
+
self.server = None
-
+
def reconnect(self):
"""
disconnect and connect again
"""
self.disconnect()
self.connect()
-
+
def enable_compression(self):
"""
Try to enable the compression
@@ -351,13 +354,13 @@ def enable_compression(self):
@retry(3,1,2) # try 3 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 4 sec
def find_folder_names(self):
"""
- depending on your account the all mail folder can be named
+ depending on your account the all mail folder can be named
[GMAIL]/ALL Mail or [GoogleMail]/All Mail.
Find and set the right one
- """
+ """
#use xlist because of localized dir names
folders = self.server.xlist_folders()
-
+
the_dir = None
for (flags, _, the_dir) in folders:
#non localised GMAIL_ALL
@@ -369,27 +372,27 @@ def find_folder_names(self):
self.localized_folders['CHATS']['loc_dir'] = the_dir
elif GIMAPFetcher.GENERIC_DRAFTS in flags:
self.localized_folders['DRAFTS']['loc_dir'] = the_dir
-
+
if not self.localized_folders['ALLMAIL']['loc_dir']: # all mail error
raise Exception("Cannot find global 'All Mail' folder (maybe localized and translated into your language) ! "\
"Check whether 'Show in IMAP for 'All Mail' is enabled in Gmail (Go to Settings->Labels->All Mail)")
elif not self.localized_folders['CHATS']['loc_dir'] and \
gmvault_utils.get_conf_defaults().getboolean("General","errors_if_chat_not_visible", False):
raise Exception("Cannot find global 'Chats' folder ! Check whether 'Show in IMAP for 'Chats' "\
- "is enabled in Gmail (Go to Settings->Labels->All Mail)")
+ "is enabled in Gmail (Go to Settings->Labels->All Mail)")
elif not self.localized_folders['DRAFTS']['loc_dir']:
raise Exception("Cannot find global 'Drafts' folder.")
-
+
@retry(3,1,2) # try 3 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 4 sec
def find_all_mail_folder(self):
"""
- depending on your account the all mail folder can be named
+ depending on your account the all mail folder can be named
[GMAIL]/ALL Mail or [GoogleMail]/All Mail.
Find and set the right one
- """
+ """
#use xlist because of localized dir names
folders = self.server.xlist_folders()
-
+
the_dir = None
for (flags, _, the_dir) in folders:
#non localised GMAIL_ALL
@@ -397,25 +400,25 @@ def find_all_mail_folder(self):
#it could be a localized Dir name
self.localized_folders['ALLMAIL']['loc_dir'] = the_dir
return the_dir
-
+
if not self.localized_folders['ALLMAIL']['loc_dir']:
#Error
raise Exception("Cannot find global 'All Mail' folder (maybe localized and translated into your language) !"\
" Check whether 'Show in IMAP for 'All Mail' is enabled in Gmail (Go to Settings->Labels->All Mail)")
-
+
@retry(3,1,2) # try 3 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 4 sec
def find_chats_folder(self):
"""
- depending on your account the chats folder can be named
+ depending on your account the chats folder can be named
[GMAIL]/Chats or [GoogleMail]/Chats, [GMAIL]/tous les chats ...
Find and set the right one
Npte: Cannot use the flags as Chats is not a system label. Thanks Google
"""
#use xlist because of localized dir names
folders = self.server.xlist_folders()
-
+
LOG.debug("Folders = %s\n" % (folders))
-
+
the_dir = None
for (_, _, the_dir) in folders:
#look for GMAIL Chats
@@ -423,40 +426,40 @@ def find_chats_folder(self):
#it could be a localized Dir name
self.localized_folders['CHATS']['loc_dir'] = the_dir
return the_dir
-
- #Error did not find Chats dir
+
+ #Error did not find Chats dir
if gmvault_utils.get_conf_defaults().getboolean("General", "errors_if_chat_not_visible", False):
raise Exception("Cannot find global 'Chats' folder ! Check whether 'Show in IMAP for 'Chats' "\
- "is enabled in Gmail (Go to Settings->Labels->All Mail)")
-
+ "is enabled in Gmail (Go to Settings->Labels->All Mail)")
+
return None
-
+
def is_visible(self, a_folder_name):
"""
- check if a folder is visible otherwise
+ check if a folder is visible otherwise
"""
dummy = self.localized_folders.get(a_folder_name)
-
+
if dummy and (dummy.get('loc_dir', None) is not None):
return True
-
- if not self.printed_folder_error_msg.get(a_folder_name, None):
+
+ if not self.printed_folder_error_msg.get(a_folder_name, None):
LOG.critical("Cannot find 'Chats' folder on Gmail Server. If you wish to backup your chats,"\
" look at the documentation to see how to configure your Gmail account.\n")
self.printed_folder_error_msg[a_folder_name] = True
-
-
+
+
return False
def get_folder_name(self, a_folder_name):
- """return real folder name from generic ones"""
+ """return real folder name from generic ones"""
if a_folder_name not in self.FOLDER_NAMES:
raise Exception("%s is not a predefined folder names. Please use one" % (a_folder_name) )
-
+
folder = self.localized_folders.get(a_folder_name, {'loc_dir' : 'GMVNONAME'})['loc_dir']
return folder
-
+
@retry(3,1,2) # try 3 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 4 sec
def select_folder(self, a_folder_name, use_predef_names = True):
"""
@@ -465,26 +468,26 @@ def select_folder(self, a_folder_name, use_predef_names = True):
if use_predef_names:
if a_folder_name not in self.FOLDER_NAMES:
raise Exception("%s is not a predefined folder names. Please use one" % (a_folder_name) )
-
+
folder = self.localized_folders.get(a_folder_name, {'loc_dir' : 'GMVNONAME'})['loc_dir']
-
+
if self.current_folder != folder:
self.server.select_folder(folder, readonly = self.readonly_folder)
self.current_folder = folder
-
+
elif self.current_folder != a_folder_name:
self.server.select_folder(a_folder_name, readonly = self.readonly_folder)
self.current_folder = a_folder_name
-
+
return self.current_folder
-
+
@retry(3,1,2) # try 3 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 4 sec
- def list_all_folders(self):
+ def list_all_folders(self):
"""
Return all folders mainly for debuging purposes
"""
return self.server.xlist_folders()
-
+
@retry(3,1,2) # try 3 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 4 sec
def get_capabilities(self):
"""
@@ -492,9 +495,9 @@ def get_capabilities(self):
"""
if not self.server:
raise Exception("GIMAPFetcher not connect to the GMAIL server")
-
+
return self.server.capabilities()
-
+
@retry(3,1,2) # try 3 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 4 sec
def check_gmailness(self):
"""
@@ -503,16 +506,16 @@ def check_gmailness(self):
if not GIMAPFetcher.GMAIL_EXTENSION in self.get_capabilities():
raise Exception("GIMAPFetcher is not connected to a IMAP GMAIL server. Please check host (%s) and port (%s)" \
% (self.host, self.port))
-
+
return True
-
+
@retry(3,1,2) # try 3 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 4 sec
def search(self, a_criteria):
"""
Return all found ids corresponding to the search
"""
return self.server.search(a_criteria)
-
+
@retry(3,1,2) # try 4 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 8 sec
def fetch(self, a_ids, a_attributes):
"""
@@ -523,7 +526,7 @@ def fetch(self, a_ids, a_attributes):
@classmethod
def _build_labels_str(cls, a_labels):
"""
- Create IMAP label string from list of given labels.
+ Create IMAP label string from list of given labels.
Convert the labels to utf7
a_labels: List of labels
"""
@@ -540,19 +543,19 @@ def _build_labels_str(cls, a_labels):
label = label.replace('"', '\\"') #replace quote with escaped quotes
labels_str += '\"%s\" ' % (label)
labels_str = '%s%s' % (labels_str[:-1],')')
-
+
return labels_str
-
+
@classmethod
def _get_dir_from_labels(cls, label):
"""
Get the dirs to create from the labels
-
+
label: label name with / in it
"""
-
+
dirs = []
-
+
i = 0
for lab in label.split('/'):
lab = gmvault_utils.remove_consecutive_spaces_and_strip(lab)
@@ -560,50 +563,50 @@ def _get_dir_from_labels(cls, label):
dirs.append(lab)
else:
dirs.append('%s/%s' % (dirs[i-1], lab))
-
+
i += 1
-
+
return dirs
-
+
def create_gmail_labels(self, labels, existing_folders):
"""
Create folders and subfolders on Gmail in order
to recreate the label hierarchy before to upload emails
Note that adding labels with +X-GM-LABELS create only nested labels
- but not nested ones. This is why this trick must be used to
+ but not nested ones. This is why this trick must be used to
recreate the label hierarchy
-
+
labels: list of labels to create
-
+
"""
-
+
#1.5-beta moved that out of the loop to minimize the number of calls
#to that method. (Could go further and memoize it)
-
+
#get existing directories (or label parts)
# get in lower case because Gmail labels are case insensitive
listed_folders = set([ directory.lower() for (_, _, directory) in self.list_all_folders() ])
existing_folders = listed_folders.union(existing_folders)
reserved_labels_map = gmvault_utils.get_conf_defaults().get_dict("Restore", "reserved_labels_map", \
{ u'migrated' : u'gmv-migrated', u'\muted' : u'gmv-muted' })
-
-
+
+
LOG.debug("Labels to create: [%s]" % (labels))
-
+
for lab in labels:
#LOG.info("Reserved labels = %s\n" % (reserved_labels))
#LOG.info("lab.lower = %s\n" % (lab.lower()))
- if lab.lower() in reserved_labels_map.keys(): #exclude creation of migrated label
+ if lab.lower() in list(reserved_labels_map.keys()): #exclude creation of migrated label
n_lab = reserved_labels_map.get(lab.lower(), "gmv-default-label")
LOG.info("Warning ! label '%s' (lower or uppercase) is reserved by Gmail and cannot be used."\
- "Use %s instead" % (lab, n_lab))
+ "Use %s instead" % (lab, n_lab))
lab = n_lab
LOG.info("translated lab = %s\n" % (lab))
-
+
#split all labels
- labs = self._get_dir_from_labels(lab)
-
+ labs = self._get_dir_from_labels(lab)
+
for directory in labs:
low_directory = directory.lower() #get lower case directory but store original label
if (low_directory not in existing_folders) and (low_directory not in self.GMAIL_SPECIAL_DIRS_LOWER):
@@ -612,7 +615,7 @@ def create_gmail_labels(self, labels, existing_folders):
raise Exception("Cannot create label %s: the directory %s cannot be created." % (lab, directory))
else:
LOG.debug("============== ####### Created Labels (%s)." % (directory))
- except imaplib.IMAP4.error, error:
+ except imaplib.IMAP4.error as error:
#log error in log file if it exists
LOG.debug(gmvault_utils.get_exception_traceback())
if str(error).startswith("create failed: '[ALREADYEXISTS] Duplicate folder"):
@@ -620,14 +623,14 @@ def create_gmail_labels(self, labels, existing_folders):
" Ignore this issue." % (directory) )
else:
raise error
-
+
#add created folder in folders
existing_folders.add(low_directory)
#return all existing folders
return existing_folders
-
-
+
+
@retry(3,1,2)
def apply_labels_to(self, imap_ids, labels):
"""
@@ -635,7 +638,7 @@ def apply_labels_to(self, imap_ids, labels):
"""
# go to All Mail folder
LOG.debug("Applying labels %s" % (labels))
-
+
the_timer = gmvault_utils.Timer()
the_timer.start()
@@ -643,16 +646,16 @@ def apply_labels_to(self, imap_ids, labels):
labels = [ utf7_encode(label) for label in labels ]
labels_str = self._build_labels_str(labels) # create labels str
-
- if labels_str:
- #has labels so update email
+
+ if labels_str:
+ #has labels so update email
the_timer.start()
LOG.debug("Before to store labels %s" % (labels_str))
id_list = ",".join(map(str, imap_ids))
#+X-GM-LABELS.SILENT to have not returned data
try:
ret_code, data = self.server._imap.uid('STORE', id_list, '+X-GM-LABELS.SILENT', labels_str) #pylint: disable=W0212
- except imaplib.IMAP4.error, original_err:
+ except imaplib.IMAP4.error as original_err:
LOG.info("Error in apply_labels_to. See exception traceback")
LOG.debug(gmvault_utils.get_exception_traceback())
# try to add labels to each individual ids
@@ -660,12 +663,12 @@ def apply_labels_to(self, imap_ids, labels):
for the_id in imap_ids:
try:
ret_code, data = self.server._imap.uid('STORE', the_id, '+X-GM-LABELS.SILENT', labels_str) #pylint: disable=W0212
- except imaplib.IMAP4.error, store_err:
+ except imaplib.IMAP4.error as store_err:
LOG.debug("Error when trying to apply labels %s to emails with imap_id %s. Error:%s" % (labels_str, the_id, store_err))
faulty_ids.append(the_id)
-
- #raise an error to ignore faulty emails
- raise LabelError("Cannot add Labels %s to emails with uids %s. Error:%s" % (labels_str, faulty_ids, original_err), ignore = True)
+
+ #raise an error to ignore faulty emails
+ raise LabelError("Cannot add Labels %s to emails with uids %s. Error:%s" % (labels_str, faulty_ids, original_err), ignore = True)
#ret_code, data = self.server._imap.uid('COPY', id_list, labels[0])
LOG.debug("After storing labels %s. Operation time = %s s.\nret = %s\ndata=%s" \
@@ -678,37 +681,37 @@ def apply_labels_to(self, imap_ids, labels):
for the_id in imap_ids:
try:
ret_code, data = self.server._imap.uid('STORE', the_id, '+X-GM-LABELS.SILENT', labels_str) #pylint: disable=W0212
- except imaplib.IMAP4.error, store_err:
+ except imaplib.IMAP4.error as store_err:
LOG.debug("Error when trying to apply labels %s to emails with imap_id %s. Error:%s" % (labels_str, the_id, store_err))
faulty_ids.append(the_id)
raise LabelError("Cannot add Labels %s to emails with uids %s. Error:%s" % (labels_str, faulty_ids, data), ignore = True)
else:
LOG.debug("Stored Labels %s for gm_ids %s" % (labels_str, imap_ids))
-
+
def delete_gmail_labels(self, labels, force_delete = False):
"""
Delete passed labels. Beware experimental and labels must be ordered
"""
for label in reversed(labels):
-
+
labs = self._get_dir_from_labels(label)
-
+
for directory in reversed(labs):
-
+
if force_delete or ( (directory.lower() not in self.GMAIL_SPECIAL_DIRS_LOWER) \
and self.server.folder_exists(directory) ): #call server exists each time
try:
self.server.delete_folder(directory)
- except imaplib.IMAP4.error, _:
+ except imaplib.IMAP4.error as _:
LOG.debug(gmvault_utils.get_exception_traceback())
-
-
+
+
def erase_mailbox(self):
"""
This is for testing purpose and cannot be used with my own mailbox
"""
-
+
if self.login == "guillaume.aubert@gmail.com":
raise Exception("Error cannot activate erase_mailbox with %s" % (self.login))
@@ -729,14 +732,14 @@ def erase_mailbox(self):
or (the_dir == u'[Google Mail]') or (u'\\Trash' in flags) or \
(u'\\Inbox' in flags) or (GIMAPFetcher.GENERIC_GMAIL_ALL in flags) or \
(GIMAPFetcher.GENERIC_DRAFTS in flags) or (GIMAPFetcher.GENERIC_GMAIL_CHATS in flags):
- LOG.info("Ignore folder %s" % (the_dir))
+ LOG.info("Ignore folder %s" % (the_dir))
if (u'\\Trash' in flags): #keep trash folder name
trash_folder_name = the_dir
else:
LOG.info("Delete folder %s" % (the_dir))
self.server.delete_folder(the_dir)
-
+
self.select_folder('ALLMAIL')
#self.server.store("1:*",'+X-GM-LABELS', '\\Trash')
@@ -744,7 +747,7 @@ def erase_mailbox(self):
#self.server.add_gmail_labels(self, messages, labels)
LOG.info("Move emails to Trash.")
-
+
# get all imap ids in ALLMAIL
imap_ids = self.search(GIMAPFetcher.IMAP_ALL)
@@ -756,7 +759,7 @@ def erase_mailbox(self):
LOG.info("Got all imap_ids flagged to Trash : %s." % (imap_ids))
-
+
else:
LOG.info("No messages to erase.")
@@ -766,7 +769,7 @@ def erase_mailbox(self):
raise Exception("No trash folder ???")
self.select_folder(trash_folder_name, False)
-
+
# get all imap ids in ALLMAIL
imap_ids = self.search(GIMAPFetcher.IMAP_ALL)
@@ -777,21 +780,21 @@ def erase_mailbox(self):
LOG.info("Expunge everything.")
self.server.expunge()
- @retry(4,1,2) # try 4 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 8 sec
+ @retry(4,1,2) # try 4 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 8 sec
def push_data(self, a_folder, a_body, a_flags, a_internal_time):
"""
Push the data
- """
+ """
# protection against myself
if self.login == 'guillaume.aubert@gmail.com':
raise Exception("Cannot push to this account")
-
+
the_timer = gmvault_utils.Timer()
the_timer.start()
LOG.debug("Before to Append email contents")
#import sys #to print the msg in stdout
#import codecs
- #sys.stdout = codecs.getwriter('utf-8')(sys.__stdout__)
+ #sys.stdout = codecs.getwriter('utf-8')(sys.__stdout__)
#msg = "a_folder = %s, a_flags = %s" % (a_folder.encode('utf-8'), a_flags)
#msg = "a_folder = %s" % (a_folder.encode('utf-8'))
#msg = msg.encode('utf-8')
@@ -800,21 +803,21 @@ def push_data(self, a_folder, a_body, a_flags, a_internal_time):
try:
#a_body = self._clean_email_body(a_body)
res = self.server.append(a_folder, a_body, a_flags, a_internal_time)
- except imaplib.IMAP4.abort, err:
+ except imaplib.IMAP4.abort as err:
# handle issue when there are invalid characters (This is do to the presence of null characters)
if str(err).find("APPEND => Invalid character in literal") >= 0:
LOG.critical("Invalid character detected. Try to clean the email and reconnect.")
a_body = self._clean_email_body(a_body)
self.reconnect()
res = self.server.append(a_folder, a_body, a_flags, a_internal_time)
-
+
LOG.debug("Appended data with flags %s and internal time %s. Operation time = %s.\nres = %s\n" \
% (a_flags, a_internal_time, the_timer.elapsed_ms(), res))
-
+
# check res otherwise Exception
if '(Success)' not in res:
raise PushEmailError("GIMAPFetcher cannot restore email in %s account." %(self.login))
-
+
match = GIMAPFetcher.APPENDUID_RE.match(res)
if match:
result_uid = int(match.group(1))
@@ -822,8 +825,8 @@ def push_data(self, a_folder, a_body, a_flags, a_internal_time):
else:
# do not quarantine it because it seems to be done by Google Mail to forbid data uploading.
raise PushEmailError("No email id returned by IMAP APPEND command. Quarantine this email.", quarantined = True)
-
- return result_uid
+
+ return result_uid
def _clean_email_body(self, a_body):
"""
@@ -831,16 +834,16 @@ def _clean_email_body(self, a_body):
"""
#for the moment just try to remove the null character brut force. In the future will have to parse the email and clean it
return a_body.replace("\0", '')
-
+
@retry(4,1,2) # try 4 times to reconnect with a sleep time of 1 sec and a backoff of 2. The fourth time will wait 8 sec
def deprecated_push_email(self, a_body, a_flags, a_internal_time, a_labels):
"""
- Push a complete email body
+ Push a complete email body
"""
#protection against myself
if self.login == 'guillaume.aubert@gmail.com':
raise Exception("Cannot push to this account")
-
+
the_t = gmvault_utils.Timer()
the_t.start()
LOG.debug("Before to Append email contents")
@@ -848,19 +851,19 @@ def deprecated_push_email(self, a_body, a_flags, a_internal_time, a_labels):
try:
res = self.server.append(u'[Google Mail]/All Mail', a_body, a_flags, a_internal_time)
- except imaplib.IMAP4.abort, err:
+ except imaplib.IMAP4.abort as err:
# handle issue when there are invalid characters (This is do to the presence of null characters)
if str(err).find("APPEND => Invalid character in literal") >= 0:
a_body = self._clean_email_body(a_body)
res = self.server.append(u'[Google Mail]/All Mail', a_body, a_flags, a_internal_time)
-
+
LOG.debug("Appended data with flags %s and internal time %s. Operation time = %s.\nres = %s\n" \
% (a_flags, a_internal_time, the_t.elapsed_ms(), res))
-
+
# check res otherwise Exception
if '(Success)' not in res:
raise PushEmailError("GIMAPFetcher cannot restore email in %s account." %(self.login))
-
+
match = GIMAPFetcher.APPENDUID_RE.match(res)
if match:
result_uid = int(match.group(1))
@@ -868,11 +871,11 @@ def deprecated_push_email(self, a_body, a_flags, a_internal_time, a_labels):
else:
# do not quarantine it because it seems to be done by Google Mail to forbid data uploading.
raise PushEmailError("No email id returned by IMAP APPEND command. Quarantine this email.", quarantined = True)
-
+
labels_str = self._build_labels_str(a_labels)
-
- if labels_str:
- #has labels so update email
+
+ if labels_str:
+ #has labels so update email
the_t.start()
LOG.debug("Before to store labels %s" % (labels_str))
self.server.select_folder(u'[Google Mail]/All Mail', readonly = self.readonly_folder) # go to current folder
@@ -882,15 +885,15 @@ def deprecated_push_email(self, a_body, a_flags, a_internal_time, a_labels):
#ret_code = self.server._store('+X-GM-LABELS', [result_uid],labels_str)
LOG.debug("After storing labels %s. Operation time = %s s.\nret = %s\ndata=%s" \
% (labels_str, the_t.elapsed_ms(),ret_code, data))
-
+
LOG.debug("Stored Labels %s in gm_id %s" % (labels_str, result_uid))
self.server.select_folder(u'[Google Mail]/Drafts', readonly = self.readonly_folder) # go to current folder
-
+
# check if it is ok otherwise exception
if ret_code != 'OK':
raise PushEmailError("Cannot add Labels %s to email with uid %d. Error:%s" % (labels_str, result_uid, data))
-
+
return result_uid
def decode_labels(labels):
@@ -899,8 +902,8 @@ def decode_labels(labels):
"""
new_labels = []
for label in labels:
- if isinstance(label, (int, long, float, complex)):
- label = str(label)
+ if isinstance(label, (int, float, complex)):
+ label = str(label)
new_labels.append(utf7_decode(label))
return new_labels
@@ -915,7 +918,7 @@ def utf7_encode(s): #pylint: disable=C0103
r = [] #pylint: disable=C0103
_in = []
for c in s: #pylint: disable=C0103
- if ord(c) in (range(0x20, 0x26) + range(0x27, 0x7f)):
+ if ord(c) in (list(range(0x20, 0x26)) + list(range(0x27, 0x7f))):
if _in:
r.extend(['&', utf7_modified_base64(''.join(_in)), '-'])
del _in[:]
@@ -953,8 +956,8 @@ def utf7_decode(s): #pylint: disable=C0103
r.append(utf7_modified_unbase64(''.join(decode[1:])))
out = ''.join(r)
- if not isinstance(out, unicode):
- out = unicode(out, 'latin-1')
+ if not isinstance(out, six.text_type):
+ out = six.text_type(out, 'latin-1')
return out
diff --git a/src/gmv/log_utils.py b/src/gmv/log_utils.py
index 05a9ed4d..018bf5ae 100755
--- a/src/gmv/log_utils.py
+++ b/src/gmv/log_utils.py
@@ -16,6 +16,7 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import
import sys
import os
diff --git a/src/gmv/mod_imap.py b/src/gmv/mod_imap.py
index edbcb160..7f66c74d 100755
--- a/src/gmv/mod_imap.py
+++ b/src/gmv/mod_imap.py
@@ -19,18 +19,20 @@
Contains the class monkey patching IMAPClient and imaplib
'''
+from __future__ import absolute_import, print_function
+
import zlib
import datetime
import re
import socket
import ssl
-import cStringIO
+import io
import os
import imaplib #for the exception
import imapclient
-#enable imap debugging if GMV_IMAP_DEBUG is set
+#enable imap debugging if GMV_IMAP_DEBUG is set
if os.getenv("GMV_IMAP_DEBUG"):
imaplib.Debug = 4 #enable debugging
@@ -55,22 +57,22 @@ def mod_convert_INTERNALDATE(date_string, normalise_times=True):#pylint: disable
mon = INTERNALDATE_RE.match('INTERNALDATE "%s"' % date_string)
if not mon:
raise ValueError("couldn't parse date %r" % date_string)
-
+
zoneh = int(mon.group('zoneh'))
zonem = (zoneh * 60) + int(mon.group('zonem'))
if mon.group('zonen') == '-':
zonem = -zonem
timez = imapclient.fixed_offset.FixedOffset(zonem)
-
+
year = int(mon.group('year'))
the_mon = MON2NUM[mon.group('mon')]
day = int(mon.group('day'))
hour = int(mon.group('hour'))
minute = int(mon.group('min'))
sec = int(mon.group('sec'))
-
+
the_dt = datetime.datetime(year, the_mon, day, hour, minute, sec, 0, timez)
-
+
if normalise_times:
# Normalise to host system's timezone
return the_dt.astimezone(imapclient.fixed_offset.FixedOffset.for_system()).replace(tzinfo=None)
@@ -114,9 +116,9 @@ def __init__(self, host = '', port = imaplib.IMAP4_SSL_PORT, keyfile = None, cer
"""
self.compressor = None
self.decompressor = None
-
+
imaplib.IMAP4_SSL.__init__(self, host, port, keyfile, certfile)
-
+
def activate_compression(self):
"""
activate_compressing()
@@ -125,8 +127,8 @@ def activate_compression(self):
# rfc 1951 - pure DEFLATE, so use -15 for both windows
self.decompressor = zlib.decompressobj(-15)
self.compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15)
-
- def open(self, host = '', port = imaplib.IMAP4_SSL_PORT):
+
+ def open(self, host = '', port = imaplib.IMAP4_SSL_PORT):
"""Setup connection to remote server on "host:port".
(default: localhost:standard IMAP4 SSL port).
This connection will be used by the routines:
@@ -135,16 +137,16 @@ def open(self, host = '', port = imaplib.IMAP4_SSL_PORT):
self.host = host
self.port = port
- self.sock = socket.create_connection((host, port), self.SOCK_TIMEOUT) #add so_timeout
+ self.sock = socket.create_connection((host, port), self.SOCK_TIMEOUT) #add so_timeout
#self.sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1) #try to set TCP NO DELAY to increase performances
self.sslobj = ssl.wrap_socket(self.sock, self.keyfile, self.certfile)
#self.sslobj = ssl.wrap_socket(self.sock, self.keyfile, self.certfile, suppress_ragged_eofs = False)
-
+
# This is the last correction added to avoid memory fragmentation in imaplib
- # makefile creates a file object that makes use of cStringIO to avoid mem fragmentation
- # it could be used without the compression
+ # makefile creates a file object that makes use of io to avoid mem fragmentation
+ # it could be used without the compression
# (maybe make 2 set of methods without compression and with compression)
#self.file = self.sslobj.makefile('rb')
@@ -153,38 +155,38 @@ def new_read(self, size):
Read 'size' bytes from remote.
Call _intern_read that takes care of the compression
"""
-
- chunks = cStringIO.StringIO() #use cStringIO.cStringIO to avoir too much fragmentation
+
+ chunks = io.BytesIO()
read = 0
while read < size:
try:
data = self._intern_read(min(size-read, 16384)) #never ask more than 16384 because imaplib can do it
- except ssl.SSLError, err:
- print("************* SSLError received %s" % (err))
+ except ssl.SSLError as err:
+ print(("************* SSLError received %s" % (err)))
raise self.abort('Gmvault ssl socket error: EOF. Connection lost, reconnect.')
read += len(data)
chunks.write(data)
-
- return chunks.getvalue() #return the cStringIO content
-
+
+ return chunks.getvalue() #return the io content
+
def read(self, size):
"""
Read 'size' bytes from remote.
Call _intern_read that takes care of the compression
"""
-
- chunks = cStringIO.StringIO() #use cStringIO.cStringIO to avoir too much fragmentation
+
+ chunks = io.BytesIO()
read = 0
while read < size:
data = self._intern_read(min(size-read, 16384)) #never ask more than 16384 because imaplib can do it
- if not data:
+ if not data:
#to avoid infinite looping due to empty string returned
- raise self.abort('Gmvault ssl socket error: EOF. Connection lost, reconnect.')
+ raise self.abort('Gmvault ssl socket error: EOF. Connection lost, reconnect.')
read += len(data)
chunks.write(data)
-
- return chunks.getvalue() #return the cStringIO content
-
+
+ return chunks.getvalue() #return the io content
+
def _intern_read(self, size):
"""
Read at most 'size' bytes from remote.
@@ -199,24 +201,24 @@ def _intern_read(self, size):
data = self.sslobj.read(8192) #Fixed buffer size. maybe change to 16384
return self.decompressor.decompress(data, size)
-
+
def readline(self):
"""Read line from remote."""
- line = cStringIO.StringIO() #use cStringIO to avoid memory fragmentation
+ line = io.BytesIO() #use BytesIO to avoid memory fragmentation
while 1:
#make use of read that takes care of the compression
#it could be simplified without compression
- char = self.read(1)
+ char = self.read(1)
line.write(char)
- if char in ("\n", ""):
+ if char in (b"\n", b""):
return line.getvalue()
-
+
def shutdown(self):
"""Close I/O established in "open"."""
#self.file.close() #if file created
self.sock.close()
-
-
+
+
def send(self, data):
"""send(data)
Send 'data' to remote."""
@@ -224,7 +226,7 @@ def send(self, data):
data = self.compressor.compress(data)
data += self.compressor.flush(zlib.Z_SYNC_FLUSH)
self.sslobj.sendall(data)
-
+
def seq_to_parenlist(flags):
"""Convert a sequence of strings into parenthised list string for
use with IMAP commands.
@@ -234,13 +236,13 @@ def seq_to_parenlist(flags):
elif not isinstance(flags, (tuple, list)):
raise ValueError('invalid flags list: %r' % flags)
return '(%s)' % ' '.join(flags)
-
+
class MonkeyIMAPClient(imapclient.IMAPClient): #pylint:disable=R0903,R0904
"""
Need to extend the IMAPClient to do more things such as compression
Compression inspired by http://www.janeelix.com/piers/python/py2html.cgi/piers/python/imaplib2
"""
-
+
def __init__(self, host, port=None, use_uid=True, need_ssl=False):
"""
constructor
@@ -256,7 +258,7 @@ def oauth2_login(self, oauth2_cred):
typ, data = self._imap.authenticate('XOAUTH2', lambda x: oauth2_cred)
self._checkok('authenticate', typ, data)
return data[0]
-
+
def search(self, criteria): #pylint: disable=W0221
"""
Perform a imap search or gmail search
@@ -270,12 +272,12 @@ def search(self, criteria): #pylint: disable=W0221
return self.gmail_search(criteria.get('req',''))
else:
raise Exception("Unknown search type %s" % (criteria.get('type','no request type passed')))
-
+
def gmail_search(self, criteria):
"""
perform a search with gmailsearch criteria.
eg, subject:Hello World
- """
+ """
criteria = criteria.replace('\\', '\\\\')
criteria = criteria.replace('"', '\\"')
@@ -283,20 +285,20 @@ def gmail_search(self, criteria):
#args = ['CHARSET', 'utf-8', 'X-GM-RAW', '"%s"' % (criteria)]
#typ, data = self._imap.uid('SEARCH', *args)
- #working Literal search
+ #working Literal search
self._imap.literal = '"%s"' % (criteria)
self._imap.literal = imaplib.MapCRLF.sub(imaplib.CRLF, self._imap.literal)
self._imap.literal = self._imap.literal.encode("utf-8")
-
+
#use uid to keep the imap ids consistent
args = ['CHARSET', 'utf-8', 'X-GM-RAW']
typ, data = self._imap.uid('SEARCH', *args) #pylint: disable=W0142
-
+
self._checkok('search', typ, data)
if data == [None]: # no untagged responses...
return [ ]
- return [ long(i) for i in data[0].split() ]
+ return [ int(i) for i in data[0].split() ]
def append(self, folder, msg, flags=(), msg_time=None):
"""Append a message to *folder*.
@@ -326,7 +328,7 @@ def append(self, folder, msg, flags=(), msg_time=None):
time_val,
to_bytes(msg),
unpack=True)
-
+
def enable_compression(self):
"""
enable_compression()
@@ -342,4 +344,4 @@ def enable_compression(self):
#no errors for the moment
pass
-
+
diff --git a/src/gmv/progress_test.py b/src/gmv/progress_test.py
index 43a1578b..9537eea6 100644
--- a/src/gmv/progress_test.py
+++ b/src/gmv/progress_test.py
@@ -1,5 +1,7 @@
+from __future__ import absolute_import
import time
import sys
+from six.moves import range
def progress_2():
"""
@@ -11,7 +13,7 @@ def progress_2():
steps = 100
- for i in xrange(steps):
+ for i in range(steps):
time.sleep(0.1)
percents += 1
#sys.stdout.write("\b" * (len(to_write)))
@@ -32,7 +34,7 @@ def progress_1():
sys.stdout.flush()
sys.stdout.write("\b" * (toolbar_width+1)) # return to start of line, after '['
- for i in xrange(toolbar_width):
+ for i in range(toolbar_width):
time.sleep(0.1) # do real work here
# update the bar
sys.stdout.write("-")
diff --git a/src/gmv/test_utils.py b/src/gmv/test_utils.py
index 274579e8..cc536831 100755
--- a/src/gmv/test_utils.py
+++ b/src/gmv/test_utils.py
@@ -17,16 +17,18 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import, print_function
+
import base64
import os
import datetime
import hashlib
-import gmv.gmvault as gmvault
-import gmv.imap_utils as imap_utils
-import gmv.credential_utils as cred_utils
-import gmv.gmvault_db as gmvault_db
-import gmv.gmvault_utils as gmvault_utils
+from . import gmvault
+from . import imap_utils
+from . import credential_utils as cred_utils
+from . import gmvault_db
+from . import gmvault_utils
def check_remote_mailbox_identical_to_local(the_self, gmvaulter, extra_labels = []): #pylint: disable=C0103,R0912,R0914,R0915
@@ -39,14 +41,14 @@ def check_remote_mailbox_identical_to_local(the_self, gmvaulter, extra_labels =
pivot_dir = None
gmail_ids = gmvaulter.gstorer.get_all_existing_gmail_ids(pivot_dir)
- print("gmail_ids = %s\n" % (gmail_ids))
-
+ print(("gmail_ids = %s\n" % (gmail_ids)))
+
#need to check that all labels are there for emails in essential
gmvaulter.src.select_folder('ALLMAIL')
-
- # check the number of id on disk
+
+ # check the number of id on disk
imap_ids = gmvaulter.src.search({ 'type' : 'imap', 'req' : 'ALL'}) #get everything
-
+
the_self.assertEquals(len(imap_ids), \
len(gmail_ids), \
"Error. Should have the same number of emails: local nb of emails %d,"\
@@ -54,11 +56,11 @@ def check_remote_mailbox_identical_to_local(the_self, gmvaulter, extra_labels =
for gm_id in gmail_ids:
- print("Fetching id %s with request %s" % (gm_id, imap_utils.GIMAPFetcher.GET_ALL_BUT_DATA))
+ print(("Fetching id %s with request %s" % (gm_id, imap_utils.GIMAPFetcher.GET_ALL_BUT_DATA)))
#get disk_metadata
disk_metadata = gmvaulter.gstorer.unbury_metadata(gm_id)
- print("disk metadata %s\n" % (disk_metadata))
+ print(("disk metadata %s\n" % (disk_metadata)))
#date = disk_metadata['internal_date'].strftime('"%d %b %Y"')
subject = disk_metadata.get('subject', None)
@@ -77,44 +79,44 @@ def check_remote_mailbox_identical_to_local(the_self, gmvaulter, extra_labels =
subject = subject.split("'")[0]
subject = subject.split('"')[0]
if has_something: #add extra space if it has a date
- req += ' '
+ req += ' '
req += 'SUBJECT "{subject}"'.format(subject=subject.strip().encode('utf-8'))
has_something = True
if msgid:
if has_something: #add extra space if it has a date
- req += ' '
+ req += ' '
req += 'HEADER MESSAGE-ID {msgid}'.format(msgid=msgid.strip())
has_something = True
-
+
if received:
if has_something:
req += ' '
req += 'HEADER X-GMAIL-RECEIVED {received}'.format(received=received.strip())
has_something = True
-
+
req += ")"
- print("Req = %s\n" % (req))
+ print(("Req = %s\n" % (req)))
imap_ids = gmvaulter.src.search({ 'type' : 'imap', 'req': req, 'charset': 'utf-8'})
- print("imap_ids = %s\n" % (imap_ids))
+ print(("imap_ids = %s\n" % (imap_ids)))
if len(imap_ids) != 1:
the_self.fail("more than one imap_id (%s) retrieved for request %s" % (imap_ids, req))
imap_id = imap_ids[0]
-
- # get online_metadata
+
+ # get online_metadata
online_metadata = gmvaulter.src.fetch(imap_id, \
- imap_utils.GIMAPFetcher.GET_ALL_BUT_DATA)
+ imap_utils.GIMAPFetcher.GET_ALL_BUT_DATA)
- print("online_metadata = %s\n" % (online_metadata))
- print("disk_metadata = %s\n" % (disk_metadata))
+ print(("online_metadata = %s\n" % (online_metadata)))
+ print(("disk_metadata = %s\n" % (disk_metadata)))
header_fields = online_metadata[imap_id]['BODY[HEADER.FIELDS (MESSAGE-ID SUBJECT X-GMAIL-RECEIVED)]']
-
+
subject, msgid, received = gmvault_db.GmailStorer.parse_header_fields(header_fields)
#compare metadata
@@ -123,16 +125,16 @@ def check_remote_mailbox_identical_to_local(the_self, gmvaulter, extra_labels =
the_self.assertEquals(received, disk_metadata.get('x_gmail_received', None))
# check internal date it is plus or minus 1 hour
- online_date = online_metadata[imap_id].get('INTERNALDATE', None)
- disk_date = disk_metadata.get('internal_date', None)
+ online_date = online_metadata[imap_id].get('INTERNALDATE', None)
+ disk_date = disk_metadata.get('internal_date', None)
if online_date != disk_date:
min_date = disk_date - datetime.timedelta(hours=1)
max_date = disk_date + datetime.timedelta(hours=1)
-
+
if min_date <= online_date <= max_date:
- print("online_date (%s) and disk_date (%s) differs but "\
- "within one hour. This is OK (timezone pb) *****" % (online_date, disk_date))
+ print(("online_date (%s) and disk_date (%s) differs but "\
+ "within one hour. This is OK (timezone pb) *****" % (online_date, disk_date)))
else:
the_self.fail("online_date (%s) and disk_date (%s) are different" % (online_date, disk_date))
@@ -142,7 +144,7 @@ def check_remote_mailbox_identical_to_local(the_self, gmvaulter, extra_labels =
for x_lab in extra_labels:
disk_labels.append(x_lab)
- online_labels = imap_utils.decode_labels(online_metadata[imap_id].get('X-GM-LABELS', None))
+ online_labels = imap_utils.decode_labels(online_metadata[imap_id].get('X-GM-LABELS', None))
#clean potential labels with multiple spaces
disk_labels = [ gmvault_utils.remove_consecutive_spaces_and_strip(label) for label in disk_labels ]
@@ -151,8 +153,8 @@ def check_remote_mailbox_identical_to_local(the_self, gmvaulter, extra_labels =
if not disk_labels: #no disk_labels check that there are no online_labels
the_self.assertTrue(not online_labels)
- print("disk_labels = %s\n" % (disk_labels))
- print("online_labels = %s\n" % (online_labels))
+ print(("disk_labels = %s\n" % (disk_labels)))
+ print(("online_labels = %s\n" % (online_labels)))
the_self.assertEquals(len(disk_labels), len(online_labels))
for label in disk_labels:
@@ -167,7 +169,7 @@ def check_remote_mailbox_identical_to_local(the_self, gmvaulter, extra_labels =
# check flags
disk_flags = disk_metadata.get('flags', None)
- online_flags = online_metadata[imap_id].get('FLAGS', None)
+ online_flags = online_metadata[imap_id].get('FLAGS', None)
if not disk_flags: #no disk flags
the_self.assertTrue(not online_flags)
@@ -178,7 +180,7 @@ def check_remote_mailbox_identical_to_local(the_self, gmvaulter, extra_labels =
if flag not in online_flags:
the_self.fail("flag %s should be in "\
"online_flags %s as it is in disk_flags %s" \
- % (flag, online_flags, disk_flags))
+ % (flag, online_flags, disk_flags))
def find_identical_emails(gmvaulter_a): #pylint: disable=R0914
"""
@@ -186,58 +188,58 @@ def find_identical_emails(gmvaulter_a): #pylint: disable=R0914
"""
# check all ids one by one
gmvaulter_a.src.select_folder('ALLMAIL')
-
- # check the number of id on disk
- imap_ids_a = gmvaulter_a.src.search({ 'type' : 'imap', 'req' : 'ALL'})
-
+
+ # check the number of id on disk
+ imap_ids_a = gmvaulter_a.src.search({ 'type' : 'imap', 'req' : 'ALL'})
+
batch_size = 1000
batch_fetcher_a = gmvault.IMAPBatchFetcher(gmvaulter_a.src, imap_ids_a, \
gmvaulter_a.error_report, imap_utils.GIMAPFetcher.GET_ALL_BUT_DATA, \
default_batch_size = batch_size)
-
- print("Got %d emails in gmvault_a(%s).\n" % (len(imap_ids_a), gmvaulter_a.login))
-
- identicals = {}
+
+ print(("Got %d emails in gmvault_a(%s).\n" % (len(imap_ids_a), gmvaulter_a.login)))
+
+ identicals = {}
in_db = {}
-
+
total_processed = 0
imap_ids = gmvaulter_a.src.search({ 'type' : 'imap', \
'req' : '(HEADER MESSAGE-ID 1929235391.1106286872672.JavaMail.wserver@disvds016)'})
- print("Len(imap_ids): %d, imap_ids = %s" % (len(imap_ids), imap_ids))
+ print(("Len(imap_ids): %d, imap_ids = %s" % (len(imap_ids), imap_ids)))
# get all gm_id for fetcher_b
for gm_ids in batch_fetcher_a:
cpt = 0
#print("gm_ids = %s\n" % (gm_ids))
- print("Process a new batch (%d). Total processed:%d.\n" % (batch_size, total_processed))
+ print(("Process a new batch (%d). Total processed:%d.\n" % (batch_size, total_processed)))
for one_id in gm_ids:
if cpt % 50 == 0:
- print("look for %s" % (one_id))
+ print(("look for %s" % (one_id)))
header_fields = gm_ids[one_id]['BODY[HEADER.FIELDS (MESSAGE-ID SUBJECT X-GMAIL-RECEIVED)]']
-
+
subject, msgid, received = gmvault_db.GmailStorer.parse_header_fields(header_fields)
labels = gm_ids[one_id]['X-GM-LABELS']
- date_internal = gm_ids[one_id]['INTERNALDATE']
+ date_internal = gm_ids[one_id]['INTERNALDATE']
if not in_db.get(msgid, None):
in_db[msgid] = [{'subject': subject, 'received': received, \
'gmid': gm_ids[one_id]['X-GM-MSGID'], \
- 'date': date_internal , 'labels': labels}]
+ 'date': date_internal , 'labels': labels}]
else:
in_db[msgid].append({'subject': subject, 'received': received, \
'gmid': gm_ids[one_id]['X-GM-MSGID'], \
- 'date': date_internal , 'labels': labels})
- print("identical found msgid %s : %s" \
+ 'date': date_internal , 'labels': labels})
+ print(("identical found msgid %s : %s" \
% (msgid, {'subject': subject, \
'received': received, \
'gmid': gm_ids[one_id]['X-GM-MSGID'],\
- 'date': date_internal , 'labels': labels}))
-
- cpt += 1
+ 'date': date_internal , 'labels': labels})))
+
+ cpt += 1
total_processed += batch_size
#create list of identicals
@@ -246,19 +248,19 @@ def find_identical_emails(gmvaulter_a): #pylint: disable=R0914
identicals[msgid] = in_db[msgid]
#print identicals
- print("Found %d identicals" % (len(identicals)))
+ print(("Found %d identicals" % (len(identicals))))
for msgid in identicals:
- print("== MSGID ==: %s" % (msgid))
+ print(("== MSGID ==: %s" % (msgid)))
for vals in identicals[msgid]:
- print("===========> gmid: %s ### date: %s ### subject: %s ### "\
+ print(("===========> gmid: %s ### date: %s ### subject: %s ### "\
"labels: %s ### received: %s" \
% (vals.get('gmid',None), vals.get('date', None),\
vals.get('subject',None), vals.get('labels', None), \
- vals.get('received',None)))
+ vals.get('received',None))))
#print("vals:%s" % (vals))
print("\n")
-
- #print("Identical emails:\n%s" % (identicals))
+
+ #print("Identical emails:\n%s" % (identicals))
def diff_online_mailboxes(gmvaulter_a, gmvaulter_b): #pylint: disable=R0912, R0914
"""
@@ -267,111 +269,111 @@ def diff_online_mailboxes(gmvaulter_a, gmvaulter_b): #pylint: disable=R0912, R09
# check all ids one by one
gmvaulter_a.src.select_folder('ALLMAIL')
gmvaulter_b.src.select_folder('ALLMAIL')
-
- # check the number of id on disk
- imap_ids_a = gmvaulter_a.src.search({ 'type' : 'imap', 'req' : 'ALL'})
- imap_ids_b = gmvaulter_b.src.search({ 'type' : 'imap', 'req' : 'ALL'})
-
+
+ # check the number of id on disk
+ imap_ids_a = gmvaulter_a.src.search({ 'type' : 'imap', 'req' : 'ALL'})
+ imap_ids_b = gmvaulter_b.src.search({ 'type' : 'imap', 'req' : 'ALL'})
+
batch_size = 700
batch_fetcher_a = gmvault.IMAPBatchFetcher(gmvaulter_a.src, imap_ids_a, gmvaulter_a.error_report, \
imap_utils.GIMAPFetcher.GET_ALL_BUT_DATA, \
default_batch_size = batch_size)
-
+
batch_fetcher_b = gmvault.IMAPBatchFetcher(gmvaulter_b.src, imap_ids_b, gmvaulter_b.error_report, \
imap_utils.GIMAPFetcher.GET_ALL_BUT_DATA, \
default_batch_size = batch_size)
-
- print("Got %d emails in gmvault_a(%s).\n" % (len(imap_ids_a), gmvaulter_a.login))
- print("Got %d emails in gmvault_b(%s).\n" % (len(imap_ids_b), gmvaulter_b.login))
-
+
+ print(("Got %d emails in gmvault_a(%s).\n" % (len(imap_ids_a), gmvaulter_a.login)))
+ print(("Got %d emails in gmvault_b(%s).\n" % (len(imap_ids_b), gmvaulter_b.login)))
+
if len(imap_ids_a) != len(imap_ids_b):
- print("Oh Oh, gmvault_a has %s emails and gmvault_b has %s emails\n" \
- % (len(imap_ids_a), len(imap_ids_b)))
+ print(("Oh Oh, gmvault_a has %s emails and gmvault_b has %s emails\n" \
+ % (len(imap_ids_a), len(imap_ids_b))))
else:
- print("Both databases has %d emails." % (len(imap_ids_a)))
-
+ print(("Both databases has %d emails." % (len(imap_ids_a))))
+
diff_result = { "in_a" : {},
"in_b" : {},
- }
-
+ }
+
gm_ids_b = {}
total_processed = 0
# get all gm_id for fetcher_b
for gm_ids in batch_fetcher_b:
#print("gm_ids = %s\n" % (gm_ids))
- print("Process a new batch (%d). Total processed:%d.\n" % (batch_size, total_processed))
+ print(("Process a new batch (%d). Total processed:%d.\n" % (batch_size, total_processed)))
for one_id in gm_ids:
gm_id = gm_ids[one_id]['X-GM-MSGID']
-
+
header_fields = gm_ids[one_id]['BODY[HEADER.FIELDS (MESSAGE-ID SUBJECT X-GMAIL-RECEIVED)]']
-
+
subject, msgid, received = gmvault_db.GmailStorer.parse_header_fields(header_fields)
-
+
the_hash = hashlib.md5()
if received:
the_hash.update(received)
-
+
if subject:
the_hash.update(subject)
-
+
if msgid:
the_hash.update(msgid)
id = base64.encodestring(the_hash.digest())
-
+
gm_ids_b[id] = [gm_id, subject, msgid]
total_processed += batch_size
#dumb search not optimisation
#iterate over imap_ids_a and flag emails only in a but not in b
- #remove emails from imap_ids_b everytime they are found
+ #remove emails from imap_ids_b everytime they are found
for data_infos in batch_fetcher_a:
for gm_info in data_infos:
gm_id = data_infos[gm_info]['X-GM-MSGID']
-
+
header_fields = data_infos[gm_info]['BODY[HEADER.FIELDS (MESSAGE-ID SUBJECT X-GMAIL-RECEIVED)]']
-
+
subject, msgid, received = gmvault_db.GmailStorer.parse_header_fields(header_fields)
-
+
the_hash = hashlib.md5()
if received:
the_hash.update(received)
-
+
if subject:
the_hash.update(subject)
-
+
if msgid:
the_hash.update(msgid)
id = base64.encodestring(the_hash.digest())
-
+
if id not in gm_ids_b:
diff_result["in_a"][received] = [gm_id, subject, msgid]
else:
del gm_ids_b[id]
-
+
for recv_id in gm_ids_b:
diff_result["in_b"][recv_id] = gm_ids_b[recv_id]
-
-
+
+
# print report
if (len(diff_result["in_a"]) > 0 or len(diff_result["in_b"]) > 0):
- print("emails only in gmv_a:\n")
+ print("emails only in gmv_a:\n")
print_diff_result(diff_result["in_a"])
print("\n")
- print("emails only in gmv_b:%s\n")
+ print("emails only in gmv_b:%s\n")
print_diff_result(diff_result["in_b"])
else:
- print("Mailbox %s and %s are identical.\n" % (gmvaulter_a.login, gmvaulter_b.login))
-
+ print(("Mailbox %s and %s are identical.\n" % (gmvaulter_a.login, gmvaulter_b.login)))
+
def print_diff_result(diff_result):
""" print the diff_result structure
"""
for key in diff_result:
vals = diff_result[key]
- print("mailid:%s#####subject:%s#####%s." % (vals[2], vals[1], vals[0]))
+ print(("mailid:%s#####subject:%s#####%s." % (vals[2], vals[1], vals[0])))
def assert_login_is_protected(login):
@@ -379,7 +381,7 @@ def assert_login_is_protected(login):
Insure that the login is not my personnal mailbox
"""
if login != 'gsync.mtester@gmail.com':
- raise Exception("Beware login should be gsync.mtester@gmail.com and it is %s" % (login))
+ raise Exception("Beware login should be gsync.mtester@gmail.com and it is %s" % (login))
def clean_mailbox(login , credential):
"""
@@ -387,12 +389,12 @@ def clean_mailbox(login , credential):
"""
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, login, credential, readonly_folder = False)
- print("login = %s" % (login))
+ print(("login = %s" % (login)))
assert_login_is_protected(login)
gimap.connect()
-
+
gimap.erase_mailbox()
@@ -425,7 +427,7 @@ def get_oauth_cred(email, cred_path):
token = None
secret = None
if os.path.exists(user_oauth_file_path):
- print("Get XOAuth credential from %s.\n" % user_oauth_file_path)
+ print(("Get XOAuth credential from %s.\n" % user_oauth_file_path))
try:
with open(user_oauth_file_path) as oauth_file:
@@ -435,10 +437,10 @@ def get_oauth_cred(email, cred_path):
if len(oauth_result) == 2:
token = oauth_result[0]
secret = oauth_result[1]
- except Exception, _: #pylint: disable-msg=W0703
- print("Cannot read oauth credentials from %s. Force oauth credentials renewal." % user_oauth_file_path)
+ except Exception as _: #pylint: disable-msg=W0703
+ print(("Cannot read oauth credentials from %s. Force oauth credentials renewal." % user_oauth_file_path))
print("=== Exception traceback ===")
- print(gmvault_utils.get_exception_traceback())
+ print((gmvault_utils.get_exception_traceback()))
print("=== End of Exception traceback ===\n")
if token: token = token.strip() #pylint: disable-msg=C0321
diff --git a/src/gmv/validation_tests.py b/src/gmv/validation_tests.py
index 581f4437..ead987b1 100755
--- a/src/gmv/validation_tests.py
+++ b/src/gmv/validation_tests.py
@@ -16,6 +16,7 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import
import unittest
import base64
diff --git a/src/gmv_cmd_tests.py b/src/gmv_cmd_tests.py
index 358d566c..3f463b60 100755
--- a/src/gmv_cmd_tests.py
+++ b/src/gmv_cmd_tests.py
@@ -16,6 +16,8 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import, print_function
+
import sys
import unittest
import base64
@@ -24,6 +26,7 @@
import ssl
import imaplib
+
import gmv.gmvault as gmvault
import gmv.gmvault_db as gmvault_db
import gmv.gmvault_utils as gmvault_utils
@@ -64,12 +67,12 @@ class TestGMVCMD(unittest.TestCase): #pylint:disable-msg=R0904
def __init__(self, stuff):
""" constructor """
super(TestGMVCMD, self).__init__(stuff)
-
+
self.login = None
self.passwd = None
-
+
self.gmvault_login = None
- self.gmvault_passwd = None
+ self.gmvault_passwd = None
def setUp(self): #pylint:disable-msg=C0103
self.login, self.passwd = read_password_file('/homespace/gaubert/.ssh/passwd')
@@ -85,14 +88,14 @@ def ztest_commandline_args(self):
Test commandline args
"""
gmv_cmd.init_logging()
-
+
# test 1: default
sys.argv = ['gmvault.py', 'sync', self.login]
-
+
gmvlt = gmv_cmd.GMVaultLauncher()
-
+
args = gmvlt.parse_args()
-
+
#check args
self.assertEquals(args['command'], 'sync')
self.assertEquals(args['type'], 'full')
@@ -104,17 +107,17 @@ def ztest_commandline_args(self):
self.assertEquals(args['port'], 993)
self.assertEquals(args['db-cleaning'], True)
self.assertEquals(args['db-dir'],'%s/gmvault-db' % (os.environ['HOME']))
-
-
+
+
# test 2: do imap search
sys.argv = ['gmvault.py', 'sync','-t', 'custom',
'-r', 'Since 1-Nov-2011 Before 4-Nov-2011', \
'--db-dir','/tmp/new-db-1', self.login]
-
+
gmvlt = gmv_cmd.GMVaultLauncher()
-
+
args = gmvlt.parse_args()
-
+
#check args
self.assertEquals(args['command'], 'sync')
self.assertEquals(args['type'], 'custom')
@@ -126,17 +129,17 @@ def ztest_commandline_args(self):
self.assertEquals(args['port'], 993)
self.assertEquals(args['db-cleaning'], True)
self.assertEquals(args['db-dir'],'/tmp/new-db-1')
-
+
# test 2: do gmail search
sys.argv = ['gmvault.py', 'sync','-t', 'custom',
'-g', 'subject:Chandeleur bis', \
'--db-dir','/tmp/new-db-1', self.login]
-
+
#do same as in bootstrap
gmvlt = gmv_cmd.GMVaultLauncher()
-
+
args = gmvlt.parse_args()
-
+
#check args
self.assertEquals(args['command'], 'sync')
self.assertEquals(args['type'], 'custom')
@@ -148,18 +151,18 @@ def ztest_commandline_args(self):
self.assertEquals(args['port'], 993)
self.assertEquals(args['db-cleaning'], True)
self.assertEquals(args['db-dir'],'/tmp/new-db-1')
-
+
#test3 emails only
sys.argv = ['gmvault.py', 'sync','-t', 'custom',
'-g', 'subject:Chandeleur bis', \
'--db-dir','/tmp/new-db-1', \
'--emails-only', self.login]
-
+
#with emails only
gmvlt = gmv_cmd.GMVaultLauncher()
-
+
args = gmvlt.parse_args()
-
+
#check args
self.assertEquals(args['emails_only'], True)
self.assertEquals(args['chats_only'], False)
@@ -173,17 +176,17 @@ def ztest_commandline_args(self):
self.assertEquals(args['port'], 993)
self.assertEquals(args['db-cleaning'], True)
self.assertEquals(args['db-dir'],'/tmp/new-db-1')
-
+
#test chats only
sys.argv = ['gmvault.py', 'sync','-t', 'custom',
'-g', 'subject:Chandeleur bis', \
'--db-dir','/tmp/new-db-1', \
'--chats-only', self.login]
-
+
gmvlt = gmv_cmd.GMVaultLauncher()
-
+
args = gmvlt.parse_args()
-
+
#check args
self.assertEquals(args['chats_only'], True)
self.assertEquals(args['emails_only'], False)
@@ -201,20 +204,20 @@ def ztest_commandline_args(self):
self.assertEquals(args['compression'], True)
self.assertEquals(args['debug'], False)
self.assertEquals(args['restart'], False)
-
-
+
+
#test5 chats only
sys.argv = ['gmvault.py', 'sync','-t', 'custom',
'-g', 'subject:Chandeleur bis', \
'--db-dir','/tmp/new-db-1', \
'--check-db', 'no', '--resume', '--debug',\
'--no-compression', self.login]
-
+
#with emails only
gmvlt = gmv_cmd.GMVaultLauncher()
-
+
args = gmvlt.parse_args()
-
+
#check args
self.assertEquals(args['chats_only'], False)
self.assertEquals(args['emails_only'], False)
@@ -231,8 +234,8 @@ def ztest_commandline_args(self):
self.assertEquals(args['compression'], False)
self.assertEquals(args['debug'], True)
self.assertEquals(args['restart'], True)
-
-
+
+
def zztest_cli_bad_server(self):
"""
Test the cli interface bad option
@@ -241,18 +244,18 @@ def zztest_cli_bad_server(self):
'--port', '993', '--imap-req', \
'Since 1-Nov-2011 Before 4-Nov-2011', \
self.login]
-
+
gmvaulter = gmv_cmd.GMVaultLauncher()
-
+
args = gmvaulter.parse_args()
-
+
try:
-
+
gmvaulter.run(args)
-
- except SystemExit, _:
+
+ except SystemExit as _:
print("In Error success")
-
+
def ztest_cli_bad_passwd(self):
"""
Test the cli interface bad option
@@ -261,18 +264,18 @@ def ztest_cli_bad_passwd(self):
'--imap-port', 993, '--imap-request', \
'Since 1-Nov-2011 Before 4-Nov-2011', \
'--email', self.login, '--passwd', 'bar']
-
+
gmvaulter = gmv_cmd.GMVaultLauncher()
-
+
args = gmvaulter.parse_args()
-
+
try:
-
+
gmvaulter.run(args)
-
- except SystemExit, err:
+
+ except SystemExit as err:
print("In Error success")
-
+
def ztest_cli_bad_login(self):
"""
Test the cli interface bad option
@@ -281,20 +284,20 @@ def ztest_cli_bad_login(self):
'--imap-port', 993, '--imap-request', \
'Since 1-Nov-2011 Before 4-Nov-2011', \
'--passwd', ]
-
+
gmvaulter = gmv_cmd.GMVaultLauncher()
-
+
args = gmvaulter.parse_args()
-
+
try:
-
+
gmvaulter.run(args)
-
- except SystemExit, err:
+
+ except SystemExit as err:
print("In Error success")
-
-
-
+
+
+
def zztest_cli_host_error(self):
"""
Test the cli interface bad option
@@ -302,15 +305,15 @@ def zztest_cli_host_error(self):
sys.argv = ['gmvault.py', 'sync', '--host', \
'imap.gmail.com', '--port', '1452', \
self.login]
-
+
gmvaulter = gmv_cmd.GMVaultLauncher()
-
+
try:
_ = gmvaulter.parse_args()
- except SystemExit, err:
+ except SystemExit as err:
self.assertEquals(type(err), type(SystemExit()))
self.assertEquals(err.code, 2)
- except Exception, err:
+ except Exception as err:
self.fail('unexpected exception: %s' % err)
else:
self.fail('SystemExit exception expected')
@@ -323,12 +326,12 @@ def zztest_cli_(self):
'--port', '993', '--imap-req', \
'Since 1-Nov-2011 Before 10-Nov-2011', \
'--passwd', self.login]
-
+
gmvaulter = gmv_cmd.GMVaultLauncher()
-
+
try:
args = gmvaulter.parse_args()
-
+
self.assertEquals(args['command'],'sync')
self.assertEquals(args['type'],'full')
self.assertEquals(args['email'], self.login)
@@ -338,12 +341,12 @@ def zztest_cli_(self):
self.assertEquals(args['host'],'imap.gmail.com')
self.assertEquals(args['port'], 993)
self.assertEquals(args['db-dir'],'./gmvault-db')
-
- except SystemExit, err:
+
+ except SystemExit as err:
self.fail("SystemExit Exception: %s" % err)
- except Exception, err:
+ except Exception as err:
self.fail('unexpected exception: %s' % err)
-
+
def ztest_full_sync_gmv(self):
"""
full test via the command line
@@ -352,101 +355,101 @@ def ztest_full_sync_gmv(self):
'--imap-port', '993', '--imap-request', \
'Since 1-Nov-2011 Before 5-Nov-2011', '--email', \
self.login, '--passwd', self.passwd]
-
+
gmvault_launcher = gmv_cmd.GMVaultLauncher()
-
+
args = gmvault_launcher.parse_args()
-
+
gmvault_launcher.run(args)
-
+
#check all stored gmail ids
gstorer = gmvault.GmailStorer(args['db-dir'])
-
+
ids = gstorer.get_all_existing_gmail_ids()
-
+
self.assertEquals(len(ids), 5)
-
- self.assertEquals(ids, {1384403887202624608L: '2011-11', \
- 1384486067720566818L: '2011-11', \
- 1384313269332005293L: '2011-11', \
- 1384545182050901969L: '2011-11', \
- 1384578279292583731L: '2011-11'})
-
+
+ self.assertEquals(ids, {1384403887202624608: '2011-11', \
+ 1384486067720566818: '2011-11', \
+ 1384313269332005293: '2011-11', \
+ 1384545182050901969: '2011-11', \
+ 1384578279292583731: '2011-11'})
+
#clean db dir
delete_db_dir(args['db-dir'])
-
+
def ztest_password_handling(self):
"""
Test all credentials handling
"""
gmv_cmd.init_logging()
-
+
# test 1: enter passwd and go to interactive mode
sys.argv = ['gmvault.py', '--imap-request', \
'Since 1-Nov-2011 Before 7-Nov-2011', \
'--email', self.login, \
'--passwd', '--interactive', '--db-dir', '/tmp/new-db-1']
-
+
gmvault_launcher = gmv_cmd.GMVaultLauncher()
-
+
args = gmvault_launcher.parse_args()
-
+
credential = gmvault_launcher.get_credential(args, test_mode = {'activate': True, 'value' : 'a_password'}) #test_mode needed to avoid calling get_pass
-
+
self.assertEquals(credential, {'type': 'passwd', 'value': 'a_password'})
-
+
# store passwd and re-read it
sys.argv = ['gmvault.py', '--imap-request', \
'Since 1-Nov-2011 Before 7-Nov-2011', \
'--email', self.login, \
'--passwd', '--save-passwd', '--db-dir', '/tmp/new-db-1']
-
+
gmvault_launcher = gmv_cmd.GMVaultLauncher()
-
+
args = gmvault_launcher.parse_args()
-
+
credential = gmvault_launcher.get_credential(args, test_mode = {'activate': True, 'value' : 'a_new_password'})
-
+
self.assertEquals(credential, {'type': 'passwd', 'option': 'saved', 'value': 'a_new_password'})
-
+
# now read the password
sys.argv = ['gmvault.py', 'sync', '--imap-req', \
'Since 1-Nov-2011 Before 7-Nov-2011', \
'-t', 'custom', \
'--passwd', '--db-dir', '/tmp/new-db-1', self.login]
-
+
gmvault_launcher = gmv_cmd.GMVaultLauncher()
-
+
args = gmvault_launcher.parse_args()
-
+
credential = gmvault_launcher.get_credential(args, test_mode = {'activate': True, 'value' : "don't care"})
-
+
self.assertEquals(credential, {'type': 'passwd', 'option': 'read', 'value': 'a_new_password'})
-
-
+
+
def ztest_double_login(self):
"""
double login
"""
# now read the password
sys.argv = ['gmvault.py', 'sync', '--db-dir', '/tmp/new-db-1', self.login]
-
+
gmvault_launcher = gmv_cmd.GMVaultLauncher()
-
+
args = gmvault_launcher.parse_args()
-
+
credential = credential_utils.CredentialHelper.get_credential(args)
-
+
syncer = gmvault.GMVaulter(args['db-dir'], args['host'], args['port'], \
args['email'], credential)
-
+
print("First connection \n")
syncer.src.connect()
-
+
import time
time.sleep(60*10)
-
+
print("Connection 10 min later")
syncer.src.connect()
@@ -463,7 +466,7 @@ def ztest_oauth2_login(self):
credential = credential_utils.CredentialHelper.get_credential(args)
- print("CREDENTIALS:%s" % (credential))
+ print(("CREDENTIALS:%s" % (credential)))
syncer = gmvault.GMVaulter(args['db-dir'], args['host'], args['port'], \
args['email'], credential)
@@ -496,7 +499,7 @@ def test_oauth2_reconnect(self):
credential = credential_utils.CredentialHelper.get_credential(args)
- print("CREDENTIALS:%s" % (credential))
+ print(("CREDENTIALS:%s" % (credential)))
syncer = gmvault.GMVaulter(args['db-dir'], args['host'], args['port'], \
args['email'], credential)
@@ -522,28 +525,28 @@ def ztest_debug_restore(self):
"""
# now read the password
sys.argv = ['gmvault.py', 'restore', '--db-dir', '/Users/gaubert/Dev/projects/gmvault/src/gmv/gmvault-db', 'gsync.mtester@gmail.com']
-
+
gmv_cmd.bootstrap_run()
-
+
def ztest_restore_with_labels(self):
"""
Test restore with labels
"""
-
+
sys.argv = ['gmvault.py', 'restore', '--restart', '--db-dir', '/Users/gaubert/Dev/projects/gmvault/src/gmv/gmvault-db', 'gsync.mtester@gmail.com']
-
+
gmv_cmd.bootstrap_run()
-
-
+
+
def ztest_quick_sync_with_labels(self):
"""
Test quick sync
--renew-passwd
"""
sys.argv = ['gmvault.py', 'sync', self.login]
-
+
gmv_cmd.bootstrap_run()
-
+
def ztest_simple_get_and_restore(self):
"""
get few emails and restore them
@@ -551,19 +554,19 @@ def ztest_simple_get_and_restore(self):
db_dir = '/tmp/gmail_bk'
#clean db dir
delete_db_dir(db_dir)
-
+
print("Synchronize\n")
-
+
sys.argv = ['gmvault.py', 'sync', '-t', 'custom', '-r', 'Since 1-Nov-2011 Before 3-Nov-2011', '--db-dir', db_dir, 'guillaume.aubert@gmail.com']
gmv_cmd.bootstrap_run()
-
+
print("Restore\n")
-
+
sys.argv = ['gmvault.py', 'restore', '--db-dir', db_dir, 'gsync.mtester@gmail.com']
gmv_cmd.bootstrap_run()
-
+
def ztest_simple_get_encrypt_and_restore(self):
"""
get few emails and restore them
@@ -571,63 +574,63 @@ def ztest_simple_get_encrypt_and_restore(self):
db_dir = '/tmp/gmail_bk'
#clean db dir
delete_db_dir(db_dir)
-
+
print("Synchronize\n")
-
+
sys.argv = ['gmvault.py', 'sync', '-t', 'custom', '--encrypt','-r', 'Since 1-Nov-2011 Before 3-Nov-2011', '--db-dir', db_dir, 'guillaume.aubert@gmail.com']
gmv_cmd.bootstrap_run()
-
+
print("Restore\n")
-
+
sys.argv = ['gmvault.py', 'restore', '--db-dir', db_dir, 'gsync.mtester@gmail.com']
gmv_cmd.bootstrap_run()
-
+
def ztest_delete_sync_gmv(self):
"""
delete sync via command line
"""
delete_db_dir('/tmp/new-db-1')
-
+
#first request to have the extra dirs
sys.argv = ['gmvault.py', 'sync', '-t', 'custom', '-r', \
'Since 1-Nov-2011 Before 7-Nov-2011', \
'--db-dir', '/tmp/new-db-1', 'guillaume.aubert@gmail.com']
-
+
#check all stored gmail ids
gstorer = gmvault_db.GmailStorer('/tmp/new-db-1')
-
+
gmv_cmd.bootstrap_run()
-
+
ids = gstorer.get_all_existing_gmail_ids()
-
+
self.assertEquals(len(ids), 9)
-
+
delete_db_dir('/tmp/new-db-1')
-
- #second requests so all files after the 5 should disappear
+
+ #second requests so all files after the 5 should disappear
sys.argv = ['gmvault.py', 'sync', '-t', 'custom', '-r', \
'Since 1-Nov-2011 Before 5-Nov-2011', \
'--db-dir', '/tmp/new-db-1', '-c', 'yes', 'guillaume.aubert@gmail.com']
-
+
gmv_cmd.bootstrap_run()
-
+
gstorer = gmvault_db.GmailStorer('/tmp/new-db-1')
-
+
ids = gstorer.get_all_existing_gmail_ids()
-
+
self.assertEquals(len(ids), 5)
-
- self.assertEquals(ids, {1384403887202624608L: '2011-11', \
- 1384486067720566818L: '2011-11', \
- 1384313269332005293L: '2011-11', \
- 1384545182050901969L: '2011-11', \
- 1384578279292583731L: '2011-11'})
-
+
+ self.assertEquals(ids, {1384403887202624608: '2011-11', \
+ 1384486067720566818: '2011-11', \
+ 1384313269332005293: '2011-11', \
+ 1384545182050901969: '2011-11', \
+ 1384578279292583731: '2011-11'})
+
#clean db dir
delete_db_dir('/tmp/new-db-1')
-
+
def tests():
"""
@@ -635,7 +638,7 @@ def tests():
"""
suite = unittest.TestLoader().loadTestsFromTestCase(TestGMVCMD)
unittest.TextTestRunner(verbosity=2).run(suite)
-
+
if __name__ == '__main__':
-
+
tests()
diff --git a/src/gmv_runner.py b/src/gmv_runner.py
index 4d796007..f51ad493 100755
--- a/src/gmv_runner.py
+++ b/src/gmv_runner.py
@@ -17,6 +17,7 @@
'''
+from __future__ import absolute_import
import gmv.gmv_cmd
gmv.gmv_cmd.bootstrap_run()
diff --git a/src/gmvault_essential_tests.py b/src/gmvault_essential_tests.py
index 2b3ff4d4..a2d64490 100755
--- a/src/gmvault_essential_tests.py
+++ b/src/gmvault_essential_tests.py
@@ -15,12 +15,14 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see .
'''
+from __future__ import absolute_import, print_function
+
import unittest
+
import gmv.gmvault as gmvault
import gmv.gmvault_utils as gmvault_utils
import gmv.test_utils as test_utils
-
class TestEssentialGMVault(unittest.TestCase): #pylint:disable-msg=R0904
"""
Current Main test class
@@ -29,12 +31,12 @@ class TestEssentialGMVault(unittest.TestCase): #pylint:disable-msg=R0904
def __init__(self, stuff):
""" constructor """
super(TestEssentialGMVault, self).__init__(stuff)
-
+
self.gsync_login = None
- self.gsync_passwd = None
+ self.gsync_passwd = None
self.gmvault_test_login = None
self.gmvault_test_passwd = None
-
+
def setUp(self): #pylint:disable-msg=C0103
"""setup"""
self.gsync_login, self.gsync_passwd = test_utils.read_password_file('/homespace/gaubert/.ssh/gsync_passwd')
@@ -53,10 +55,10 @@ def search_for_email(self, gmvaulter, req):
gmvaulter.src.select_folder('ALLMAIL')
imap_ids = gmvaulter.src.search({ 'type' : 'imap', 'req': req, 'charset': 'utf-8' })
-
- print("imap_ids = %s\n" % (imap_ids))
-
-
+
+ print(("imap_ids = %s\n" % (imap_ids)))
+
+
def test_restore_tricky_emails(self):
""" Test_restore_tricky_emails. Restore emails with some specificities (japanese characters) in the a mailbox """
gsync_credential = { 'type' : 'passwd', 'value': self.gsync_passwd }
@@ -69,41 +71,41 @@ def test_restore_tricky_emails(self):
test_db_dir = "/homespace/gaubert/gmvault-dbs/essential-dbs"
#test_db_dir = "/home/gmv/Dev/projects/gmvault-develop/src/test-db"
#test_db_dir = "/Users/gaubert/Dev/projects/gmvault-develop/src/test-db"
-
+
restorer = gmvault.GMVaulter(test_db_dir, 'imap.gmail.com', 993, \
self.gsync_login, gsync_credential, \
read_only_access = False)
-
+
restorer.restore(extra_labels = extra_labels) #restore all emails from this essential-db
test_utils.check_remote_mailbox_identical_to_local(self, restorer, extra_labels)
-
+
def test_backup_and_restore(self):
""" Backup from gmvault_test and restore """
gsync_credential = { 'type' : 'passwd', 'value': self.gsync_passwd }
gmvault_test_credential = { 'type' : 'passwd', 'value': self.gmvault_test_passwd }
-
+
test_utils.clean_mailbox(self.gsync_login, gsync_credential)
-
+
gmvault_test_db_dir = "/tmp/backup-restore"
-
+
backuper = gmvault.GMVaulter(gmvault_test_db_dir, 'imap.gmail.com', 993, \
self.gmvault_test_login, gmvault_test_credential, \
read_only_access = False)
-
+
backuper.sync({ 'mode': 'full', 'type': 'imap', 'req': 'ALL' })
-
+
#check that we have x emails in the database
restorer = gmvault.GMVaulter(gmvault_test_db_dir, 'imap.gmail.com', 993, \
self.gsync_login, gsync_credential, \
read_only_access = False)
-
+
restorer.restore() #restore all emails from this essential-db
test_utils.check_remote_mailbox_identical_to_local(self, restorer)
test_utils.diff_online_mailboxes(backuper, restorer)
-
+
gmvault_utils.delete_all_under(gmvault_test_db_dir, delete_top_dir = True)
def ztest_delete_gsync(self):
@@ -114,20 +116,20 @@ def ztest_delete_gsync(self):
gmvault_test_credential = { 'type' : 'passwd', 'value': self.gmvault_test_passwd }
test_utils.clean_mailbox(self.gsync_login, gsync_credential)
-
+
def ztest_find_identicals(self):
"""
"""
gsync_credential = { 'type' : 'passwd', 'value': self.gsync_passwd }
-
+
gmv_dir_a = "/tmp/a-db"
gmv_a = gmvault.GMVaulter(gmv_dir_a, 'imap.gmail.com', 993, self.gsync_login, gsync_credential, read_only_access = True)
-
+
test_utils.find_identical_emails(gmv_a)
-
+
def ztest_difference(self):
"""
-
+
"""
gsync_credential = { 'type' : 'passwd', 'value': self.gsync_passwd }
gmvault_test_credential = { 'type' : 'passwd', 'value': self.gmvault_test_passwd }
@@ -137,23 +139,23 @@ def ztest_difference(self):
gmv_dir_b = "/tmp/b-db"
gmv_a = gmvault.GMVaulter(gmv_dir_a, 'imap.gmail.com', 993, self.gsync_login, gsync_credential, read_only_access = True)
-
+
#gmv_a = gmvault.GMVaulter(gmv_dir_a, 'imap.gmail.com', 993, self.gmvault_test_login, gmvault_test_credential, read_only_access = False)
-
+
#gmv_b = gmvault.GMVaulter(gmv_dir_b, 'imap.gmail.com', 993, self.gmvault_test_login, gmvault_test_credential, read_only_access = False)
#gmv_b = gmvault.GMVaulter(gmv_dir_b, 'imap.gmail.com', 993, self.ba_login, ba_credential, read_only_access = True)
gmv_b = gmvault.GMVaulter(gmv_dir_b, 'imap.gmail.com', 993, self.ga_login, self.ga_cred, read_only_access = True)
-
+
test_utils.diff_online_mailboxes(gmv_a, gmv_b)
-
+
def tests():
"""
main test function
"""
suite = unittest.TestLoader().loadTestsFromTestCase(TestEssentialGMVault)
unittest.TextTestRunner(verbosity=2).run(suite)
-
+
if __name__ == '__main__':
-
+
tests()
diff --git a/src/gmvault_tests.py b/src/gmvault_tests.py
index dc205179..9e0aceb8 100755
--- a/src/gmvault_tests.py
+++ b/src/gmvault_tests.py
@@ -15,6 +15,8 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see .
'''
+from __future__ import absolute_import, print_function
+
import unittest
import base64
import shutil
@@ -25,6 +27,7 @@
import gmv.gmvault as gmvault
import gmv.gmvault_utils as gmvault_utils
import gmv.imap_utils as imap_utils
+from six.moves import range
def obfuscate_string(a_str):
@@ -60,104 +63,104 @@ class TestGMVault(unittest.TestCase): #pylint:disable-msg=R0904
def __init__(self, stuff):
""" constructor """
super(TestGMVault, self).__init__(stuff)
-
+
self.login = None
self.passwd = None
-
+
self.gmvault_login = None
- self.gmvault_passwd = None
-
+ self.gmvault_passwd = None
+
def setUp(self): #pylint:disable-msg=C0103
self.login, self.passwd = read_password_file('/homespace/gaubert/.ssh/passwd')
-
+
self.gmvault_login, self.gmvault_passwd = read_password_file('/homespace/gaubert/.ssh/gsync_passwd')
-
-
+
+
def ztest_gmvault_connect_error(self):
"""
Test connect error (connect to a wrong port). Too long to check
"""
gimap = imap_utils.GIMAPFetcher('imap.gmafil.com', 80, "badlogin", "badpassword")
-
+
try:
gimap.connect()
- except ssl.SSLError, err:
-
+ except ssl.SSLError as err:
+
msg = str(err)
-
+
if not msg.startswith('[Errno 8] _ssl.c:') or not msg.endswith('EOF occurred in violation of protocol'):
self.fail('received %s. Bad error message' % (msg))
-
+
def ztest_gmvault_get_capabilities(self):
"""
Test simple retrieval
"""
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
-
+
gimap.connect()
-
+
self.assertEquals(('IMAP4REV1', 'UNSELECT', \
'IDLE', 'NAMESPACE', \
'QUOTA', 'ID', 'XLIST', \
'CHILDREN', 'X-GM-EXT-1', \
'XYZZY', 'SASL-IR', 'AUTH=XOAUTH') , gimap.get_capabilities())
-
+
def ztest_gmvault_check_gmailness(self):
"""
Test simple retrieval
"""
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
-
+
gimap.connect()
-
+
self.assertEquals( True , gimap.check_gmailness())
-
+
def ztest_gmvault_compression(self):
"""
Test simple retrieval
"""
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
-
+
gimap.connect()
-
+
gimap.enable_compression()
-
+
self.assertEquals( True , gimap.check_gmailness())
-
+
criteria = ['Before 1-Jan-2011']
ids = gimap.search(criteria)
-
+
self.assertEquals(len(ids), 33577)
-
+
def ztest_created_nested_dirs(self):
""" Try to create nested dirs """
client = mod_imap.MonkeyIMAPClient('imap.gmail.com', port = 993, use_uid = True, ssl= True)
-
+
client.login(self.gmvault_login, self.gmvault_passwd)
-
+
folders_info = client.list_folders()
-
+
print(folders_info)
-
+
folders = [ the_dir for (_, _, the_dir) in folders_info ]
-
- print('folders %s\n' %(folders))
+
+ print(('folders %s\n' %(folders)))
the_dir = 'ECMWF-Archive'
#dir = 'test'
if the_dir not in folders:
res = client.create_folder(dir)
print(res)
-
+
folders = [ the_dir for (_, _, dir) in folders_info ]
-
- print('folders %s\n' %(folders))
+
+ print(('folders %s\n' %(folders)))
the_dir = 'ECMWF-Archive/ecmwf-simdat'
#dir = 'test/test-1'
if the_dir not in folders:
res = client.create_folder(the_dir)
print(res)
-
+
def zztest_create_gmail_labels_upper_case(self):
"""
validate the label creation at the imap fetcher level.
@@ -165,221 +168,221 @@ def zztest_create_gmail_labels_upper_case(self):
"""
gs_credential = { 'type' : 'passwd', 'value': self.gmvault_passwd}
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.gmvault_login, gs_credential)
-
+
gimap.connect()
-
-
+
+
print("\nCreate labels.\n")
-
+
labels_to_create = ['0','A','a', 'B/C', 'B/C/d', 'B/C/d/e', 'c/d']
-
+
existing_folders = set()
-
+
existing_folders = gimap.create_gmail_labels(labels_to_create, existing_folders)
-
- print("folders = %s\n" % (existing_folders))
+
+ print(("folders = %s\n" % (existing_folders)))
for label in labels_to_create:
- self.assertTrue( (label.lower() in existing_folders) )
-
+ self.assertTrue( (label.lower() in existing_folders) )
+
labels_to_create = ['0','A','a', 'B/C', 'B/C/d', 'B/C/d/e', 'c/d', 'diablo3', 'blizzard', 'blizzard/diablo']
#labels_to_create = ['B/c', u'[Imap]/Trash', u'[Imap]/Sent', 'a', 'A', 'e/f/g', 'b/c/d', ]
-
+
existing_folders = set()
-
+
existing_folders = gimap.create_gmail_labels(labels_to_create, existing_folders)
-
- print("folders = %s\n" % (existing_folders))
+
+ print(("folders = %s\n" % (existing_folders)))
for label in labels_to_create:
- self.assertTrue( (label.lower() in existing_folders) )
-
+ self.assertTrue( (label.lower() in existing_folders) )
+
print("Delete labels\n")
-
+
gimap.delete_gmail_labels(labels_to_create)
-
+
#get existing directories (or label parts)
folders = [ directory.lower() for (_, _, directory) in gimap.get_all_folders() ]
-
+
for label in labels_to_create: #check that they have been deleted
self.assertFalse( (label.lower() in folders) )
-
+
def zztest_create_gmail_labels_android(self):
"""
Handle labels with [Imap]
"""
gs_credential = { 'type' : 'passwd', 'value': self.gmvault_passwd}
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.gmvault_login, gs_credential)
-
+
gimap.connect()
-
+
print("\nCreate labels.\n")
-
+
labels_to_create = [u'[IMAP]/Trash', u'[IMAP]/Sent']
-
+
existing_folders = set()
-
+
existing_folders = gimap.create_gmail_labels(labels_to_create, existing_folders)
-
+
#get existing directories (or label parts)
#print("xlist folders = %s\n" % (gimap.get_all_folders()) )
-
+
#folders = [ directory.lower() for (flags, delim, directory) in gimap.server.list_folders() ]
folders = [ directory.lower() for directory in existing_folders ]
-
- print("folders = %s\n" % (folders))
+
+ print(("folders = %s\n" % (folders)))
for label in labels_to_create:
- self.assertTrue( (label.lower() in folders) )
-
+ self.assertTrue( (label.lower() in folders) )
+
# second creation
labels_to_create = [u'[RETEST]', u'[RETEST]/test', u'[RETEST]/Trash', u'[IMAP]/Trash', u'[IMAP]/Draft', u'[IMAP]/Sent', u'[IMAP]']
-
+
existing_folders = gimap.create_gmail_labels(labels_to_create, existing_folders)
-
+
folders = [ directory.lower() for directory in existing_folders ]
-
- print("folders = %s" % (folders))
+
+ print(("folders = %s" % (folders)))
for label in labels_to_create:
- self.assertTrue( (label.lower() in folders) )
-
+ self.assertTrue( (label.lower() in folders) )
+
#it isn't possible to delete the [IMAP]/Sent, [IMAP]/Draft [IMAP]/Trash labels
# I give up and do not delete them in the test
- labels_to_delete = [u'[RETEST]', u'[RETEST]/test', u'[RETEST]/Trash']
-
+ labels_to_delete = [u'[RETEST]', u'[RETEST]/test', u'[RETEST]/Trash']
+
print("Delete labels\n")
-
+
# delete them
gimap.delete_gmail_labels(labels_to_delete)
-
+
#get existing directories (or label parts)
folders = [ directory.lower() for (_, _, directory) in gimap.get_all_folders() ]
-
+
for label in labels_to_delete: #check that they have been deleted
self.assertFalse( (label.lower() in folders) )
-
-
-
+
+
+
def ztest_gmvault_simple_search(self):
"""
search all emails before 01.01.2005
"""
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
-
+
gimap.connect()
-
+
criteria = ['Before 1-Jan-2011']
ids = gimap.search(criteria)
-
+
self.assertEquals(len(ids), 33577)
-
+
def ztest_retrieve_gmail_ids(self):
"""
Get all uid before Sep 2004
- Retrieve all GMAIL IDs
+ Retrieve all GMAIL IDs
"""
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
-
+
gimap.connect()
-
+
criteria = ['Before 1-Oct-2004']
#criteria = ['ALL']
ids = gimap.search(criteria)
-
+
res = gimap.fetch(ids, [gimap.GMAIL_ID])
-
- self.assertEquals(res, {27362: {'X-GM-MSGID': 1147537963432096749L, 'SEQ': 14535}, 27363: {'X-GM-MSGID': 1147537994018957026L, 'SEQ': 14536}})
-
+
+ self.assertEquals(res, {27362: {'X-GM-MSGID': 1147537963432096749, 'SEQ': 14535}, 27363: {'X-GM-MSGID': 1147537994018957026, 'SEQ': 14536}})
+
def ztest_retrieve_all_params(self):
"""
Get all params for a uid
Retrieve all parts for one email
"""
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
-
+
gimap.connect()
-
+
criteria = ['Before 1-Oct-2004']
#criteria = ['ALL']
ids = gimap.search(criteria)
-
+
self.assertEquals(len(ids), 2)
-
+
res = gimap.fetch(ids[0], [gimap.GMAIL_ID, gimap.EMAIL_BODY, gimap.GMAIL_THREAD_ID, gimap.GMAIL_LABELS])
-
- self.assertEquals(res[ids[0]][gimap.GMAIL_ID], 1147537963432096749L)
-
+
+ self.assertEquals(res[ids[0]][gimap.GMAIL_ID], 1147537963432096749)
+
self.assertEquals(res[ids[0]][gimap.EMAIL_BODY], \
'Message-ID: <6999505.1094377483218.JavaMail.wwwadm@chewbacca.ecmwf.int>\r\nDate: Sun, 5 Sep 2004 09:44:43 +0000 (GMT)\r\nFrom: Guillaume.Aubert@ecmwf.int\r\nReply-To: Guillaume.Aubert@ecmwf.int\r\nTo: aubert_guillaume@yahoo.fr\r\nSubject: Fwd: [Flickr] Guillaume Aubert wants you to see their photos\r\nMime-Version: 1.0\r\nContent-Type: text/plain; charset=us-ascii\r\nContent-Transfer-Encoding: 7bit\r\nX-Mailer: jwma\r\nStatus: RO\r\nX-Status: \r\nX-Keywords: \r\nX-UID: 1\r\n\r\n\r\n') #pylint:disable-msg=C0301
-
+
def ztest_gmvault_retrieve_email_store_and_read(self): #pylint:disable-msg=C0103
"""
Retrieve an email store it on disk and read it
"""
storage_dir = '/tmp/gmail_bk'
gmvault_utils.delete_all_under(storage_dir)
-
+
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
gstorer = gmvault.GmailStorer(storage_dir)
-
+
gimap.connect()
-
+
criteria = ['Before 1-Oct-2006']
#criteria = ['ALL']
ids = gimap.search(criteria)
-
+
the_id = ids[124]
-
+
res = gimap.fetch(the_id, gimap.GET_ALL_INFO)
-
+
gm_id = gstorer.bury_email(res[the_id])
-
+
metadata, data = gstorer.unbury_email(gm_id)
-
+
self.assertEquals(res[the_id][gimap.GMAIL_ID], metadata['gm_id'])
self.assertEquals(res[the_id][gimap.EMAIL_BODY], data)
self.assertEquals(res[the_id][gimap.GMAIL_THREAD_ID], metadata['thread_ids'])
-
+
labels = []
for label in res[the_id][gimap.GMAIL_LABELS]:
labels.append(label)
-
+
self.assertEquals(labels, metadata['labels'])
-
+
def ztest_gmvault_compress_retrieve_email_store_and_read(self): #pylint:disable-msg=C0103
"""
Activate compression and retrieve an email store it on disk and read it
"""
storage_dir = '/tmp/gmail_bk'
gmvault_utils.delete_all_under(storage_dir)
-
+
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
-
+
gstorer = gmvault.GmailStorer(storage_dir)
-
+
gimap.connect()
-
+
gimap.enable_compression()
-
+
criteria = ['Before 1-Oct-2006']
#criteria = ['ALL']
ids = gimap.search(criteria)
-
+
the_id = ids[124]
-
+
res = gimap.fetch(the_id, gimap.GET_ALL_INFO)
-
+
gm_id = gstorer.bury_email(res[the_id])
-
+
metadata, data = gstorer.unbury_email(gm_id)
-
+
self.assertEquals(res[the_id][gimap.GMAIL_ID], metadata['gm_id'])
self.assertEquals(res[the_id][gimap.EMAIL_BODY], data)
self.assertEquals(res[the_id][gimap.GMAIL_THREAD_ID], metadata['thread_ids'])
-
+
labels = []
for label in res[the_id][gimap.GMAIL_LABELS]:
labels.append(label)
-
+
self.assertEquals(labels, metadata['labels'])
-
+
def ztest_gmvault_retrieve_multiple_emails_store_and_read(self): #pylint:disable-msg=C0103
"""
Retrieve emails store them it on disk and read it
@@ -388,36 +391,36 @@ def ztest_gmvault_retrieve_multiple_emails_store_and_read(self): #pylint:disable
gmvault_utils.delete_all_under(storage_dir)
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
gstorer = gmvault.GmailStorer(storage_dir)
-
+
gimap.connect()
-
+
criteria = ['Before 1-Oct-2006']
#criteria = ['ALL']
ids = gimap.search(criteria)
-
+
#get 30 emails
for index in range(9, 40):
-
- print("retrieve email index %d\n" % (index))
+
+ print(("retrieve email index %d\n" % (index)))
the_id = ids[index]
-
+
res = gimap.fetch(the_id, gimap.GET_ALL_INFO)
-
+
gm_id = gstorer.bury_email(res[the_id])
-
- print("restore email index %d\n" % (index))
+
+ print(("restore email index %d\n" % (index)))
metadata, data = gstorer.unbury_email(gm_id)
-
+
self.assertEquals(res[the_id][gimap.GMAIL_ID], metadata['gm_id'])
self.assertEquals(res[the_id][gimap.EMAIL_BODY], data)
self.assertEquals(res[the_id][gimap.GMAIL_THREAD_ID], metadata['thread_ids'])
-
+
labels = []
for label in res[the_id][gimap.GMAIL_LABELS]:
labels.append(label)
-
+
self.assertEquals(labels, metadata['labels'])
-
+
def ztest_gmvault_store_gzip_email_and_read(self): #pylint:disable-msg=C0103
"""
Retrieve emails store them it on disk and read it
@@ -425,78 +428,78 @@ def ztest_gmvault_store_gzip_email_and_read(self): #pylint:disable-msg=C0103
storage_dir = '/tmp/gmail_bk'
gmvault_utils.delete_all_under(storage_dir)
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
-
+
gstorer = gmvault.GmailStorer(storage_dir)
-
+
gimap.connect()
-
+
criteria = ['Before 1-Oct-2006']
#criteria = ['ALL']
ids = gimap.search(criteria)
-
+
#get 30 emails
for index in range(9, 20):
-
- print("retrieve email index %d\n" % (index))
+
+ print(("retrieve email index %d\n" % (index)))
the_id = ids[index]
-
+
res = gimap.fetch(the_id, gimap.GET_ALL_INFO)
-
+
gm_id = gstorer.bury_email(res[the_id], compress = True)
-
- print("restore email index %d\n" % (index))
+
+ print(("restore email index %d\n" % (index)))
metadata, data = gstorer.unbury_email(gm_id)
-
+
self.assertEquals(res[the_id][gimap.GMAIL_ID], metadata['gm_id'])
self.assertEquals(res[the_id][gimap.EMAIL_BODY], data)
self.assertEquals(res[the_id][gimap.GMAIL_THREAD_ID], metadata['thread_ids'])
-
+
labels = []
for label in res[the_id][gimap.GMAIL_LABELS]:
labels.append(label)
-
+
self.assertEquals(labels, metadata['labels'])
-
+
def ztest_restore_one_email(self):
"""
get one email from one account and restore it
"""
gsource = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
gdestination = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.gmvault_login, self.gmvault_passwd, readonly_folder = False)
-
+
gsource.connect()
gdestination.connect()
-
+
criteria = ['Before 1-Oct-2006']
#criteria = ['ALL']
ids = gsource.search(criteria)
-
+
the_id = ids[0]
-
+
source_email = gsource.fetch(the_id, gsource.GET_ALL_INFO)
-
+
existing_labels = source_email[the_id][gsource.GMAIL_LABELS]
-
+
test_labels = []
for elem in existing_labels:
test_labels.append(elem)
-
+
#source_email[the_id][gsource.IMAP_INTERNALDATE] = source_email[the_id][gsource.IMAP_INTERNALDATE].replace(tzinfo= gmvault_utils.UTC_TZ)
-
+
dest_id = gdestination.push_email(source_email[the_id][gsource.EMAIL_BODY], \
source_email[the_id][gsource.IMAP_FLAGS] , \
source_email[the_id][gsource.IMAP_INTERNALDATE], test_labels)
-
+
dest_email = gdestination.fetch(dest_id, gsource.GET_ALL_INFO)
-
+
# do the checkings
self.assertEquals(dest_email[dest_id][gsource.IMAP_FLAGS], source_email[the_id][gsource.IMAP_FLAGS])
self.assertEquals(dest_email[dest_id][gsource.EMAIL_BODY], source_email[the_id][gsource.EMAIL_BODY])
self.assertEquals(dest_email[dest_id][gsource.GMAIL_LABELS], source_email[the_id][gsource.GMAIL_LABELS])
-
+
#should be ok to be checked
self.assertEquals(dest_email[dest_id][gsource.IMAP_INTERNALDATE], source_email[the_id][gsource.IMAP_INTERNALDATE])
-
+
def ztest_restore_10_emails(self):
"""
Restore 10 emails
@@ -504,78 +507,78 @@ def ztest_restore_10_emails(self):
gsource = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, self.passwd)
gdestination = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.gmvault_login, self.gmvault_passwd, \
readonly_folder = False)
-
+
gsource.connect()
gdestination.connect()
-
+
criteria = ['Before 1-Oct-2008']
#criteria = ['ALL']
ids = gsource.search(criteria)
-
+
#get 30 emails
for index in range(9, 20):
-
- print("email nb %d\n" % (index))
-
+
+ print(("email nb %d\n" % (index)))
+
the_id = ids[index]
-
+
source_email = gsource.fetch(the_id, gsource.GET_ALL_INFO)
-
+
existing_labels = source_email[the_id][gsource.GMAIL_LABELS]
-
+
# get labels
test_labels = []
for elem in existing_labels:
test_labels.append(elem)
-
+
dest_id = gdestination.push_email(source_email[the_id][gsource.EMAIL_BODY], \
source_email[the_id][gsource.IMAP_FLAGS] , \
source_email[the_id][gsource.IMAP_INTERNALDATE], test_labels)
-
+
#retrieve email from destination email account
dest_email = gdestination.fetch(dest_id, gsource.GET_ALL_INFO)
-
+
#check that it has the same
# do the checkings
self.assertEquals(dest_email[dest_id][gsource.IMAP_FLAGS], source_email[the_id][gsource.IMAP_FLAGS])
self.assertEquals(dest_email[dest_id][gsource.EMAIL_BODY], source_email[the_id][gsource.EMAIL_BODY])
-
+
dest_labels = []
for elem in dest_email[dest_id][gsource.GMAIL_LABELS]:
if not elem == '\\Important':
dest_labels.append(elem)
-
+
src_labels = []
for elem in source_email[the_id][gsource.GMAIL_LABELS]:
if not elem == '\\Important':
src_labels.append(elem)
-
+
self.assertEquals(dest_labels, src_labels)
-
+
def ztest_few_days_syncer(self):
"""
Test with the Syncer object
"""
syncer = gmvault.GMVaulter('/tmp/gmail_bk', 'imap.gmail.com', 993, self.login, self.passwd)
-
+
syncer.sync(imap_req = "Since 1-Nov-2011 Before 4-Nov-2011")
-
+
storage_dir = "%s/%s" % ('/tmp/gmail_bk/db', '2011-11')
-
+
gstorer = gmvault.GmailStorer('/tmp/gmail_bk')
-
+
metadata = gmvault.GMVaulter.check_email_on_disk(gstorer, 1384313269332005293)
-
+
self.assertEquals(metadata['gm_id'], 1384313269332005293)
-
+
metadata = gmvault.GMVaulter.check_email_on_disk(gstorer, 1384403887202624608)
-
+
self.assertEquals(metadata['gm_id'], 1384403887202624608)
-
+
metadata = gmvault.GMVaulter.check_email_on_disk(gstorer, 1384486067720566818)
-
+
self.assertEquals(metadata['gm_id'], 1384486067720566818)
-
+
def ztest_few_days_syncer_with_deletion(self): #pylint:disable-msg=C0103
"""
check that there was a deletion
@@ -583,24 +586,24 @@ def ztest_few_days_syncer_with_deletion(self): #pylint:disable-msg=C0103
db_dir = '/tmp/gmail_bk'
#clean db dir
delete_db_dir(db_dir)
-
+
#copy test email in dest dir
storage_dir = "%s/db/%s" % (db_dir, '2011-11')
-
+
gmvault_utils.makedirs(storage_dir)
-
+
shutil.copyfile('../etc/tests/test_few_days_syncer/2384403887202624608.eml.gz','%s/2384403887202624608.eml.gz' % (storage_dir))
shutil.copyfile('../etc/tests/test_few_days_syncer/2384403887202624608.meta','%s/2384403887202624608.meta' % (storage_dir))
-
+
syncer = gmvault.GMVaulter('/tmp/gmail_bk', 'imap.gmail.com', 993, self.login, self.passwd)
-
+
syncer.sync(imap_req = "Since 1-Nov-2011 Before 2-Nov-2011", db_cleaning = True)
-
+
self.assertFalse(os.path.exists('%s/2384403887202624608.eml.gz' % (storage_dir)))
self.assertFalse(os.path.exists('%s/2384403887202624608.meta' % (storage_dir)))
self.assertTrue(os.path.exists('%s/1384313269332005293.meta' % (storage_dir)))
self.assertTrue(os.path.exists('%s/1384313269332005293.eml.gz' % (storage_dir)))
-
+
def ztest_encrypt_restore_on_gmail(self):
"""
Doesn't work to be fixed
@@ -608,71 +611,71 @@ def ztest_encrypt_restore_on_gmail(self):
sync with gmail for few emails
restore them on gmail test
"""
-
+
db_dir = '/tmp/gmail_bk'
#clean db dir
delete_db_dir(db_dir)
-
+
credential = { 'type' : 'passwd', 'value': self.passwd}
search_req = { 'type' : 'imap', 'req': "Since 1-Nov-2011 Before 3-Nov-2011"}
-
+
use_encryption = True
syncer = gmvault.GMVaulter(db_dir, 'imap.gmail.com', 993, self.login, credential, read_only_access = True, use_encryption = use_encryption)
-
+
syncer.sync(imap_req = search_req)
-
+
# check that the email can be read
gstorer = gmvault.GmailStorer('/tmp/gmail_bk', use_encryption)
-
+
metadata = gmvault.GMVaulter.check_email_on_disk(gstorer, 1384313269332005293)
-
+
self.assertEquals(metadata['gm_id'], 1384313269332005293)
-
+
email_meta, email_data = gstorer.unbury_email(1384313269332005293)
-
+
self.assertTrue(email_data.startswith("Delivered-To: guillaume.aubert@gmail.com"))
-
+
#print("Email Data = \n%s\n" % (email_data))
-
+
print("Done \n")
-
+
def ztest_fix_bug_search_broken_gm_id_and_quarantine(self):
"""
Search with a gm_id and quarantine it
"""
db_dir = '/tmp/gmail_bk'
-
+
#clean db dir
delete_db_dir(db_dir)
-
+
credential = { 'type' : 'passwd', 'value': self.passwd}
gs_credential = { 'type' : 'passwd', 'value': self.gmvault_passwd}
gstorer = gmvault.GmailStorer(db_dir)
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, credential)
-
+
gimap.connect()
-
+
criteria = { 'type': 'imap', 'req' :['X-GM-MSGID 1254269417797093924']} #broken one
#criteria = ['X-GM-MSGID 1254267782370534098']
#criteria = ['ALL']
ids = gimap.search(criteria)
-
+
for the_id in ids:
res = gimap.fetch(the_id, gimap.GET_ALL_INFO)
-
+
gm_id = gstorer.bury_email(res[the_id], compress = True)
-
+
syncer = gmvault.GMVaulter(db_dir, 'imap.gmail.com', 993, self.gmvault_login, gs_credential)
-
+
syncer.restore()
-
-
+
+
#check that the file has been quarantine
quarantine_dir = '%s/quarantine' %(db_dir)
-
+
self.assertTrue(os.path.exists('%s/1254269417797093924.eml.gz' % (quarantine_dir)))
self.assertTrue(os.path.exists('%s/1254269417797093924.meta' % (quarantine_dir)))
-
+
def ztest_fix_bug(self):
"""
bug with uid 142221L => empty email returned by gmail
@@ -680,30 +683,30 @@ def ztest_fix_bug(self):
db_dir = '/tmp/gmail_bk'
credential = { 'type' : 'passwd', 'value': self.passwd}
syncer = gmvault.GMVaulter(db_dir, 'imap.gmail.com', 993, self.login, credential, 'verySecRetKeY')
-
- syncer._create_update_sync([142221L], compress = True)
-
+
+ syncer._create_update_sync([142221], compress = True)
+
def test_check_flags(self):
"""
- Check flags
+ Check flags
"""
credential = { 'type' : 'passwd', 'value': self.passwd}
#print("credential %s\n" % (credential))
gimap = imap_utils.GIMAPFetcher('imap.gmail.com', 993, self.login, credential)
-
+
gimap.connect()
-
+
imap_ids = [155182]
gmail_id = 1405877259414135030
imap_ids = [155070]
-
+
#res = gimap.fetch(imap_ids, [gimap.GMAIL_ID, gimap.IMAP_FLAGS])
res = gimap.fetch(imap_ids, gimap.GET_ALL_BUT_DATA)
-
+
print(res)
-
-
+
+
def tests():
"""
@@ -711,7 +714,7 @@ def tests():
"""
suite = unittest.TestLoader().loadTestsFromTestCase(TestGMVault)
unittest.TextTestRunner(verbosity=2).run(suite)
-
+
if __name__ == '__main__':
-
+
tests()
diff --git a/src/perf_tests.py b/src/perf_tests.py
index d65c1ef3..14f37466 100755
--- a/src/perf_tests.py
+++ b/src/perf_tests.py
@@ -16,11 +16,14 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import, print_function
+
import unittest
import datetime
import os
import gmv.gmvault_utils as gmvault_utils
import gmv.collections_utils as collections_utils
+from six.moves import range
class TestPerf(unittest.TestCase): #pylint:disable-msg=R0904
@@ -31,11 +34,11 @@ class TestPerf(unittest.TestCase): #pylint:disable-msg=R0904
def __init__(self, stuff):
""" constructor """
super(TestPerf, self).__init__(stuff)
-
-
+
+
def setUp(self): #pylint:disable-msg=C0103
pass
-
+
def _create_dirs(self, working_dir, nb_dirs, nb_files_per_dir):
"""
create all the dirs and files
@@ -43,13 +46,13 @@ def _create_dirs(self, working_dir, nb_dirs, nb_files_per_dir):
dirname = 'dir_%d'
data_file = '%d.eml'
meta_file = '%d.meta'
-
- for nb in xrange(0, nb_dirs):
+
+ for nb in range(0, nb_dirs):
#make dir
the_dir = '%s/%s' % (working_dir, dirname % nb)
gmvault_utils.makedirs(the_dir)
- for file_id in xrange(0,nb_files_per_dir):
+ for file_id in range(0,nb_files_per_dir):
#create data file
with open('%s/%s_%s' % (the_dir, dirname % nb,
data_file % file_id), 'w') as f:
@@ -62,7 +65,7 @@ def _create_dirs(self, working_dir, nb_dirs, nb_files_per_dir):
def test_read_lots_of_files(self):
"""
Test to mesure how long it takes to list over 100 000 files
- On server: 250 000 meta files in 50 dirs (50,5000) => 9.74 sec to list them
+ On server: 250 000 meta files in 50 dirs (50,5000) => 9.74 sec to list them
100 000 meta files in 20 dirs (20,5000) => 3.068 sec to list them
60 000 meta files in 60 dirs (60,1000) => 1.826 sec to list them
On linux macbook pro linux virtual machine:
@@ -73,30 +76,30 @@ def test_read_lots_of_files(self):
250 000 meta files in 50 dirs (50,5000) => 56.50 sec (3min 27 sec if dir created and listed afterward) to list them
100 000 meta files in 20 dirs (20,5000) => 20.1 sec to list them
60 000 meta files in 60 dirs (60,1000) => 9.96 sec to list them
-
+
"""
root_dir = '/tmp/dirs'
#create dirs and files
#t1 = datetime.datetime.now()
#self._create_dirs('/tmp/dirs', 50, 5000)
#t2 = datetime.datetime.now()
-
+
#print("\nTime to create dirs : %s\n" % (t2-t1))
#print("\nFiles and dirs created.\n")
-
+
the_iter = gmvault_utils.dirwalk(root_dir, a_wildcards= '*.meta')
t1 = datetime.datetime.now()
-
+
gmail_ids = collections_utils.OrderedDict()
-
+
for filepath in the_iter:
directory, fname = os.path.split(filepath)
gmail_ids[os.path.splitext(fname)[0]] = os.path.basename(directory)
t2 = datetime.datetime.now()
-
- print("\nnb of files = %s" % (len(gmail_ids.keys())))
- print("\nTime to read all meta files : %s\n" % (t2-t1))
-
+
+ print(("\nnb of files = %s" % (len(list(gmail_ids.keys())))))
+ print(("\nTime to read all meta files : %s\n" % (t2-t1)))
+
def tests():
"""
@@ -104,7 +107,7 @@ def tests():
"""
suite = unittest.TestLoader().loadTestsFromTestCase(TestPerf)
unittest.TextTestRunner(verbosity=2).run(suite)
-
+
if __name__ == '__main__':
-
+
tests()
diff --git a/src/sandbox/chardet_test.py b/src/sandbox/chardet_test.py
index 1b4bd964..9e4b5ff2 100644
--- a/src/sandbox/chardet_test.py
+++ b/src/sandbox/chardet_test.py
@@ -1,23 +1,31 @@
# -*- coding: utf-8 -*-
+from __future__ import absolute_import, print_function
+
import sys
+
import chardet
-import codecs
-
-print("system encoding: %s" % (sys.getfilesystemencoding()))
-first_arg = sys.argv[1]
-#first_arg="réception"
-#first_arg="て感じでしょうか研"
-print first_arg
-print("chardet = %s\n" % chardet.detect(first_arg))
-res_char = chardet.detect(first_arg)
-print type(first_arg)
-
-
-
-first_arg_unicode = first_arg.decode(res_char['encoding'])
-print first_arg_unicode
-print type(first_arg_unicode)
-
-utf8_arg = first_arg_unicode.encode("utf-8")
-print type(utf8_arg)
-print utf8_arg
\ No newline at end of file
+
+
+def main():
+ print(("system encoding: %s" % (sys.getfilesystemencoding())))
+ first_arg = sys.argv[1]
+ #first_arg="réception"
+ #first_arg="て感じでしょうか研"
+ print(first_arg)
+ print(("chardet = %s\n" % chardet.detect(first_arg)))
+ res_char = chardet.detect(first_arg)
+ print(type(first_arg))
+
+
+
+ first_arg_unicode = first_arg.decode(res_char['encoding'])
+ print(first_arg_unicode)
+ print(type(first_arg_unicode))
+
+ utf8_arg = first_arg_unicode.encode("utf-8")
+ print(type(utf8_arg))
+ print(utf8_arg)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/src/sandbox/common_gmvault.py b/src/sandbox/common_gmvault.py
index b5db3c1d..169ee34c 100755
--- a/src/sandbox/common_gmvault.py
+++ b/src/sandbox/common_gmvault.py
@@ -16,6 +16,7 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import
import json
import time
import datetime
@@ -23,17 +24,17 @@
import itertools
import imaplib
-import gmv.log_utils as log_utils
-import gmv.collections_utils as collections_utils
-import gmv.gmvault_utils as gmvault_utils
-import gmv.imap_utils as imap_utils
-import gmv.gmvault_db as gmvault_db
+from . import log_utils
+from . import collections_utils
+from . import gmvault_utils
+from . import imap_utils
+from . import gmvault_db
LOG = log_utils.LoggerFactory.get_logger('gmvault')
def handle_restore_imap_error(the_exception, gm_id, db_gmail_ids_info, gmvaulter):
"""
- function to handle restore IMAPError in restore functions
+ function to handle restore IMAPError in restore functions
"""
if isinstance(the_exception, imaplib.IMAP4.abort):
# if this is a Gmvault SSL Socket error quarantine the email and continue the restore
@@ -46,31 +47,31 @@ def handle_restore_imap_error(the_exception, gm_id, db_gmail_ids_info, gmvaulter
gmvaulter.src.reconnect() #reconnect
else:
raise the_exception
-
- elif isinstance(the_exception, imaplib.IMAP4.error):
+
+ elif isinstance(the_exception, imaplib.IMAP4.error):
LOG.error("Catched IMAP Error %s" % (str(the_exception)))
LOG.exception(the_exception)
-
+
#When the email cannot be read from Database because it was empty when returned by gmail imap
#quarantine it.
if str(the_exception) == "APPEND command error: BAD ['Invalid Arguments: Unable to parse message']":
LOG.critical("Quarantine email with gm id %s from %s. GMAIL IMAP cannot restore it:"\
" err={%s}" % (gm_id, db_gmail_ids_info[gm_id], str(the_exception)))
gmvaulter.gstorer.quarantine_email(gm_id)
- gmvaulter.error_report['emails_in_quarantine'].append(gm_id)
+ gmvaulter.error_report['emails_in_quarantine'].append(gm_id)
else:
raise the_exception
elif isinstance(the_exception, imap_utils.PushEmailError):
LOG.error("Catch the following exception %s" % (str(the_exception)))
LOG.exception(the_exception)
-
+
if the_exception.quarantined():
LOG.critical("Quarantine email with gm id %s from %s. GMAIL IMAP cannot restore it:"\
" err={%s}" % (gm_id, db_gmail_ids_info[gm_id], str(the_exception)))
gmvaulter.gstorer.quarantine_email(gm_id)
- gmvaulter.error_report['emails_in_quarantine'].append(gm_id)
+ gmvaulter.error_report['emails_in_quarantine'].append(gm_id)
else:
- raise the_exception
+ raise the_exception
else:
LOG.error("Catch the following exception %s" % (str(the_exception)))
LOG.exception(the_exception)
@@ -80,10 +81,10 @@ def handle_sync_imap_error(the_exception, the_id, error_report, src):
"""
function to handle IMAPError in gmvault
type = chat or email
- """
+ """
if isinstance(the_exception, imaplib.IMAP4.abort):
- # imap abort error
- # ignore it
+ # imap abort error
+ # ignore it
# will have to do something with these ignored messages
LOG.critical("Error while fetching message with imap id %s." % (the_id))
LOG.critical("\n=== Exception traceback ===\n")
@@ -91,8 +92,8 @@ def handle_sync_imap_error(the_exception, the_id, error_report, src):
LOG.critical("=== End of Exception traceback ===\n")
try:
#try to get the gmail_id
- curr = src.fetch(the_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
- except Exception, _: #pylint:disable-msg=W0703
+ curr = src.fetch(the_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
+ except Exception as _: #pylint:disable-msg=W0703
curr = None
LOG.critical("Error when trying to get gmail id for message with imap id %s." % (the_id))
LOG.critical("Disconnect, wait for 20 sec then reconnect.")
@@ -102,53 +103,53 @@ def handle_sync_imap_error(the_exception, the_id, error_report, src):
time.sleep(10)
LOG.critical("Reconnecting ...")
src.connect()
-
+
if curr:
gmail_id = curr[the_id][imap_utils.GIMAPFetcher.GMAIL_ID]
else:
gmail_id = None
-
+
#add ignored id
error_report['cannot_be_fetched'].append((the_id, gmail_id))
-
+
LOG.critical("Forced to ignore message with imap id %s, (gmail id %s)." \
% (the_id, (gmail_id if gmail_id else "cannot be read")))
-
+
elif isinstance(the_exception, imaplib.IMAP4.error):
- # check if this is a cannot be fetched error
+ # check if this is a cannot be fetched error
# I do not like to do string guessing within an exception but I do not have any choice here
LOG.critical("Error while fetching message with imap id %s." % (the_id))
LOG.critical("\n=== Exception traceback ===\n")
LOG.critical(gmvault_utils.get_exception_traceback())
LOG.critical("=== End of Exception traceback ===\n")
-
+
#quarantine emails that have raised an abort error
if str(the_exception).find("'Some messages could not be FETCHed (Failure)'") >= 0:
try:
#try to get the gmail_id
LOG.critical("One more attempt. Trying to fetch the Gmail ID for %s" % (the_id) )
- curr = src.fetch(the_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
- except Exception, _: #pylint:disable-msg=W0703
+ curr = src.fetch(the_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
+ except Exception as _: #pylint:disable-msg=W0703
curr = None
-
+
if curr:
gmail_id = curr[the_id][imap_utils.GIMAPFetcher.GMAIL_ID]
else:
gmail_id = None
-
+
#add ignored id
error_report['cannot_be_fetched'].append((the_id, gmail_id))
-
+
LOG.critical("Ignore message with imap id %s, (gmail id %s)" % (the_id, (gmail_id if gmail_id else "cannot be read")))
-
+
else:
raise the_exception #rethrow error
else:
- raise the_exception
+ raise the_exception
class IMAPBatchFetcher(object):
"""
- Fetch IMAP data in batch
+ Fetch IMAP data in batch
"""
def __init__(self, src, imap_ids, error_report, request, default_batch_size = 100):
"""
@@ -158,77 +159,77 @@ def __init__(self, src, imap_ids, error_report, request, default_batch_size = 10
self.imap_ids = imap_ids
self.def_batch_size = default_batch_size
self.request = request
- self.error_report = error_report
-
+ self.error_report = error_report
+
self.to_fetch = list(imap_ids)
-
+
def individual_fetch(self, imap_ids):
"""
Find the imap_id creating the issue
return the data related to the imap_ids
"""
new_data = {}
- for the_id in imap_ids:
- try:
+ for the_id in imap_ids:
+ try:
single_data = self.src.fetch(the_id, self.request)
- new_data.update(single_data)
- except Exception, error:
+ new_data.update(single_data)
+ except Exception as error:
handle_sync_imap_error(error, the_id, self.error_report, self.src) #do everything in this handler
return new_data
-
+
def __iter__(self):
- return self
-
+ return self
+
def next(self):
"""
Return the next batch of elements
"""
new_data = {}
batch = self.to_fetch[:self.def_batch_size]
-
+
if len(batch) <= 0:
raise StopIteration
-
+
try:
-
+
new_data = self.src.fetch(batch, self.request)
-
+
self.to_fetch = self.to_fetch[self.def_batch_size:]
-
+
return new_data
- except imaplib.IMAP4.error, _:
- new_data = self.individual_fetch(batch)
-
+ except imaplib.IMAP4.error as _:
+ new_data = self.individual_fetch(batch)
+
return new_data
-
+
def reset(self):
"""
Restart from the beginning
"""
- self.to_fetch = self.imap_ids
-
+ self.to_fetch = self.imap_ids
+
#Client to support imap serch with non ascii char (not working because of imaplibs limitations)
'''class MonkeyIMAPClient(imapclient.IMAPClient): #pylint:disable=R0903,R0904
"""
Need to extend the IMAPClient to do more things such as compression
Compression inspired by http://www.janeelix.com/piers/python/py2html.cgi/piers/python/imaplib2
"""
-
+
def __init__(self, host, port=None, use_uid=True, need_ssl=False):
"""
constructor
"""
super(MonkeyIMAPClient, self).__init__(host, port, use_uid, need_ssl)
-
+
def _create_IMAP4(self): #pylint:disable=C0103
"""
Factory method creating an IMAPCOMPSSL or a standard IMAP4 Class
"""
imap_class = self.ssl and IMAP4COMPSSL or imaplib.IMAP4
return imap_class(self.host, self.port)
-
+
def xoauth_login(self, xoauth_cred ):
"""
Connect with xoauth
@@ -237,8 +238,8 @@ def xoauth_login(self, xoauth_cred ):
typ, data = self._imap.authenticate('XOAUTH', lambda x: xoauth_cred)
self._checkok('authenticate', typ, data)
- return data[0]
-
+ return data[0]
+
def old_search(self, criteria):
"""
Perform a imap search or gmail search
@@ -252,7 +253,7 @@ def old_search(self, criteria):
return self.gmail_search(criteria.get('req',''))
else:
raise Exception("Unknown search type %s" % (criteria.get('type','no request type passed')))
-
+
def search(self, criteria):
"""
Perform a imap search or gmail search
@@ -264,19 +265,19 @@ def search(self, criteria):
charset = 'utf-8'
#return super(MonkeyIMAPClient, self).search(req, charset)
return self.imap_search(req, charset)
-
+
elif criteria.get('type','') == 'gmail':
return self.gmail_search(criteria.get('req',''))
else:
raise Exception("Unknown search type %s" % (criteria.get('type','no request type passed')))
-
-
-
+
+
+
def gmail_search(self, criteria):
"""
perform a search with gmailsearch criteria.
eg, subject:Hello World
- """
+ """
criteria = criteria.replace('\\', '\\\\')
criteria = criteria.replace('"', '\\"')
@@ -284,24 +285,24 @@ def gmail_search(self, criteria):
#args = ['CHARSET', 'utf-8', 'X-GM-RAW', '"%s"' % (criteria)]
#typ, data = self._imap.uid('SEARCH', *args)
- #working Literal search
+ #working Literal search
self._imap.literal = '"%s"' % (criteria)
self._imap.literal = imaplib.MapCRLF.sub(imaplib.CRLF, self._imap.literal)
self._imap.literal = self._imap.literal.encode("utf-8")
#args = ['X-GM-RAW']
#typ, data = self._imap.search('utf-8',*args)
-
+
#use uid to keep the imap ids consistent
args = ['CHARSET', 'utf-8', 'X-GM-RAW']
typ, data = self._imap.uid('SEARCH', *args)
-
+
self._checkok('search', typ, data)
if data == [None]: # no untagged responses...
return [ ]
return [ long(i) for i in data[0].split() ]
-
+
def append(self, folder, msg, flags=(), msg_time=None):
"""Append a message to *folder*.
@@ -331,7 +332,7 @@ def append(self, folder, msg, flags=(), msg_time=None):
self._checkok('append', typ, data)
return data[0]
-
+
def enable_compression(self):
"""
enable_compression()
@@ -347,74 +348,74 @@ def enable_compression(self):
#no errors for the moment
pass
'''
-
+
class GMVaulter(object):
"""
Main object operating over gmail
- """
+ """
NB_GRP_OF_ITEMS = 1400
EMAIL_RESTORE_PROGRESS = 'email_last_id.restore'
CHAT_RESTORE_PROGRESS = 'chat_last_id.restore'
EMAIL_SYNC_PROGRESS = 'email_last_id.sync'
CHAT_SYNC_PROGRESS = 'chat_last_id.sync'
-
+
OP_EMAIL_RESTORE = "EM_RESTORE"
OP_EMAIL_SYNC = "EM_SYNC"
OP_CHAT_RESTORE = "CH_RESTORE"
OP_CHAT_SYNC = "CH_SYNC"
-
+
OP_TO_FILENAME = { OP_EMAIL_RESTORE : EMAIL_RESTORE_PROGRESS,
OP_EMAIL_SYNC : EMAIL_SYNC_PROGRESS,
OP_CHAT_RESTORE : CHAT_RESTORE_PROGRESS,
OP_CHAT_SYNC : CHAT_SYNC_PROGRESS
}
-
-
+
+
def __init__(self, db_root_dir, host, port, login, \
credential, read_only_access = True, use_encryption = False): #pylint:disable-msg=R0913,R0914
"""
constructor
- """
+ """
self.db_root_dir = db_root_dir
-
+
#create dir if it doesn't exist
gmvault_utils.makedirs(self.db_root_dir)
-
+
#keep track of login email
self.login = login
-
+
# create source and try to connect
self.src = imap_utils.GIMAPFetcher(host, port, login, credential, \
readonly_folder = read_only_access)
-
+
self.src.connect()
-
+
LOG.debug("Connected")
-
+
self.use_encryption = use_encryption
-
+
#to report gmail imap problems
self.error_report = { 'empty' : [] ,
'cannot_be_fetched' : [],
'emails_in_quarantine' : [],
'reconnections' : 0}
-
+
#instantiate gstorer
self.gstorer = gmvault_db.GmailStorer(self.db_root_dir, self.use_encryption)
-
+
#timer used to mesure time spent in the different values
self.timer = gmvault_utils.Timer()
-
+
@classmethod
def get_imap_request_btw_2_dates(cls, begin_date, end_date):
"""
Return the imap request for those 2 dates
"""
imap_req = 'Since %s Before %s' % (gmvault_utils.datetime2imapdate(begin_date), gmvault_utils.datetime2imapdate(end_date))
-
+
return imap_req
-
+
def get_operation_report(self):
"""
Return the error report
@@ -429,11 +430,11 @@ def get_operation_report(self):
len(self.error_report['cannot_be_fetched']), \
len(self.error_report['empty'])
)
-
+
LOG.debug("error_report complete structure = %s" % (self.error_report))
-
+
return the_str
-
+
@classmethod
def _get_next_date(cls, a_current_date, start_month_beginning = False):
"""
@@ -443,10 +444,10 @@ def _get_next_date(cls, a_current_date, start_month_beginning = False):
dummy_date = a_current_date.replace(day=1)
else:
dummy_date = a_current_date
-
+
# the next date = current date + 1 month
return dummy_date + datetime.timedelta(days=31)
-
+
@classmethod
def check_email_on_disk(cls, a_gstorer, a_id, a_dir = None):
"""
@@ -454,15 +455,15 @@ def check_email_on_disk(cls, a_gstorer, a_id, a_dir = None):
"""
try:
a_dir = a_gstorer.get_directory_from_id(a_id, a_dir)
-
+
if a_dir:
- return a_gstorer.unbury_metadata(a_id, a_dir)
-
- except ValueError, json_error:
+ return a_gstorer.unbury_metadata(a_id, a_dir)
+
+ except ValueError as json_error:
LOG.exception("Cannot read file %s. Try to fetch the data again" % ('%s.meta' % (a_id)), json_error )
-
+
return None
-
+
@classmethod
def _metadata_needs_update(cls, curr_metadata, new_metadata, chat_metadata = False):
"""
@@ -470,44 +471,44 @@ def _metadata_needs_update(cls, curr_metadata, new_metadata, chat_metadata = Fal
"""
if curr_metadata[gmvault_db.GmailStorer.ID_K] != new_metadata['X-GM-MSGID']:
raise Exception("Gmail id has changed for %s" % (curr_metadata['id']))
-
- #check flags
- prev_set = set(new_metadata['FLAGS'])
-
+
+ #check flags
+ prev_set = set(new_metadata['FLAGS'])
+
for flag in curr_metadata['flags']:
if flag not in prev_set:
return True
else:
prev_set.remove(flag)
-
+
if len(prev_set) > 0:
return True
-
+
#check labels
prev_labels = set(new_metadata['X-GM-LABELS'])
-
+
if chat_metadata: #add gmvault-chats labels
prev_labels.add(gmvault_db.GmailStorer.CHAT_GM_LABEL)
-
-
+
+
for label in curr_metadata['labels']:
if label not in prev_labels:
return True
else:
prev_labels.remove(label)
-
+
if len(prev_labels) > 0:
return True
-
+
return False
-
-
+
+
def _check_email_db_ownership(self, ownership_control):
"""
Check email database ownership.
If ownership control activated then fail if a new additional owner is added.
Else if no ownership control allow one more user and save it in the list of owners
-
+
Return the number of owner this will be used to activate or not the db clean.
Activating a db cleaning on a multiownership db would be a catastrophy as it would delete all
the emails from the others users.
@@ -521,123 +522,123 @@ def _check_email_db_ownership(self, ownership_control):
% (self.db_root_dir, ", ".join(db_owners), self.login))
else:
if len(db_owners) == 0:
- LOG.critical("Establish %s as the owner of the Gmvault db %s." % (self.login, self.db_root_dir))
+ LOG.critical("Establish %s as the owner of the Gmvault db %s." % (self.login, self.db_root_dir))
elif len(db_owners) > 0 and self.login not in db_owners:
LOG.critical("The email database %s is hosting emails from %s. It will now also store emails from %s" \
% (self.db_root_dir, ", ".join(db_owners), self.login))
-
+
#try to save db_owner in the list of owners
self.gstorer.store_db_owner(self.login)
-
+
def _sync_chats(self, imap_req, compress, restart):
"""
Previous working sync for chats
backup the chat messages
"""
chat_dir = None
-
+
timer = gmvault_utils.Timer() #start local timer for chat
timer.start()
-
+
LOG.debug("Before selection")
if self.src.is_visible('CHATS'):
chat_dir = self.src.select_folder('CHATS')
-
+
LOG.debug("Selection is finished")
if chat_dir:
#imap_ids = self.src.search({ 'type': 'imap', 'req': 'ALL' })
imap_ids = self.src.search(imap_req)
-
+
# check if there is a restart
if restart:
LOG.critical("Restart mode activated. Need to find information in Gmail, be patient ...")
imap_ids = self.get_gmails_ids_left_to_sync(self.OP_CHAT_SYNC, imap_ids)
-
+
total_nb_chats_to_process = len(imap_ids) # total number of emails to get
-
+
LOG.critical("%d chat messages to be fetched." % (total_nb_chats_to_process))
-
+
nb_chats_processed = 0
-
+
to_fetch = set(imap_ids)
batch_fetcher = IMAPBatchFetcher(self.src, imap_ids, self.error_report, \
imap_utils.GIMAPFetcher.GET_ALL_BUT_DATA, \
default_batch_size = \
gmvault_utils.get_conf_defaults().getint("General", \
"nb_messages_per_batch", 500))
-
-
+
+
for new_data in batch_fetcher:
- for the_id in new_data:
- if new_data.get(the_id, None):
+ for the_id in new_data:
+ if new_data.get(the_id, None):
gid = None
-
+
LOG.debug("\nProcess imap chat id %s" % ( the_id ))
-
+
gid = new_data[the_id][imap_utils.GIMAPFetcher.GMAIL_ID]
-
+
the_dir = self.gstorer.get_sub_chats_dir()
-
+
LOG.critical("Process chat num %d (imap_id:%s) into %s." % (nb_chats_processed, the_id, the_dir))
-
+
#pass the dir and the ID
curr_metadata = GMVaulter.check_email_on_disk( self.gstorer , \
new_data[the_id][imap_utils.GIMAPFetcher.GMAIL_ID], \
the_dir)
-
+
#if on disk check that the data is not different
if curr_metadata:
-
+
if self._metadata_needs_update(curr_metadata, new_data[the_id], chat_metadata = True):
-
+
LOG.debug("Chat with imap id %s and gmail id %s has changed. Updated it." % (the_id, gid))
-
+
#restore everything at the moment
gid = self.gstorer.bury_chat_metadata(new_data[the_id], local_dir = the_dir)
-
+
#update local index id gid => index per directory to be thought out
else:
LOG.debug("The metadata for chat %s already exists and is identical to the one on GMail." % (gid))
- else:
+ else:
try:
#get the data
email_data = self.src.fetch(the_id, imap_utils.GIMAPFetcher.GET_DATA_ONLY )
-
+
new_data[the_id][imap_utils.GIMAPFetcher.EMAIL_BODY] = \
email_data[the_id][imap_utils.GIMAPFetcher.EMAIL_BODY]
-
- # store data on disk within year month dir
+
+ # store data on disk within year month dir
gid = self.gstorer.bury_chat(new_data[the_id], local_dir = the_dir, compress = compress)
-
+
#update local index id gid => index per directory to be thought out
- LOG.debug("Create and store chat with imap id %s, gmail id %s." % (the_id, gid))
- except Exception, error:
- #do everything in this handler
- handle_sync_imap_error(error, the_id, self.error_report, self.src)
-
- nb_chats_processed += 1
-
+ LOG.debug("Create and store chat with imap id %s, gmail id %s." % (the_id, gid))
+ except Exception as error:
+ #do everything in this handler
+ handle_sync_imap_error(error, the_id, self.error_report, self.src)
+
+ nb_chats_processed += 1
+
#indicate every 50 messages the number of messages left to process
left_emails = (total_nb_chats_to_process - nb_chats_processed)
-
+
if (nb_chats_processed % 50) == 0 and (left_emails > 0):
elapsed = timer.elapsed() #elapsed time in seconds
LOG.critical("\n== Processed %d emails in %s. %d left to be stored (time estimate %s).==\n" % \
(nb_chats_processed, timer.seconds_to_human_time(elapsed), \
left_emails, \
timer.estimate_time_left(nb_chats_processed, elapsed, left_emails)))
-
+
# save id every 10 restored emails
if (nb_chats_processed % 10) == 0:
if gid:
self.save_lastid(self.OP_CHAT_SYNC, gid)
else:
LOG.info("Could not process imap with id %s. Ignore it\n")
- self.error_report['empty'].append((the_id, None))
-
+ self.error_report['empty'].append((the_id, None))
+
to_fetch -= set(new_data.keys()) #remove all found keys from to_fetch set
-
+
for the_id in to_fetch:
# case when gmail IMAP server returns OK without any data whatsoever
# eg. imap uid 142221L ignore it
@@ -645,109 +646,109 @@ def _sync_chats(self, imap_req, compress, restart):
self.error_report['empty_chats'].append((the_id, None))
else:
- imap_ids = []
-
+ imap_ids = []
+
LOG.critical("\nChats synchronisation operation performed in %s.\n" % (timer.seconds_to_human_time(timer.elapsed())))
return imap_ids
def _sync_emails(self, imap_req, compress, restart):
"""
Previous sync for emails
- First part of the double pass strategy:
+ First part of the double pass strategy:
- create and update emails in db
-
- """
+
+ """
timer = gmvault_utils.Timer()
timer.start()
-
+
#select all mail folder using the constant name defined in GIMAPFetcher
self.src.select_folder('ALLMAIL')
-
+
# get all imap ids in All Mail
imap_ids = self.src.search(imap_req)
-
+
# check if there is a restart
if restart:
LOG.critical("Restart mode activated for emails. Need to find information in Gmail, be patient ...")
imap_ids = self.get_gmails_ids_left_to_sync(self.OP_EMAIL_SYNC, imap_ids)
-
+
total_nb_emails_to_process = len(imap_ids) # total number of emails to get
-
+
LOG.critical("%d emails to be fetched." % (total_nb_emails_to_process))
-
+
nb_emails_processed = 0
-
+
to_fetch = set(imap_ids)
batch_fetcher = IMAPBatchFetcher(self.src, imap_ids, self.error_report, imap_utils.GIMAPFetcher.GET_ALL_BUT_DATA, \
default_batch_size = \
gmvault_utils.get_conf_defaults().getint("General","nb_messages_per_batch",500))
-
+
#LAST Thing to do remove all found ids from imap_ids and if ids left add missing in report
- for new_data in batch_fetcher:
+ for new_data in batch_fetcher:
for the_id in new_data:
#LOG.debug("new_data = %s\n" % (new_data))
if new_data.get(the_id, None):
LOG.debug("\nProcess imap id %s" % ( the_id ))
-
+
gid = new_data[the_id][imap_utils.GIMAPFetcher.GMAIL_ID]
-
+
the_dir = gmvault_utils.get_ym_from_datetime(new_data[the_id][imap_utils.GIMAPFetcher.IMAP_INTERNALDATE])
-
+
LOG.critical("Process email num %d (imap_id:%s) from %s." % (nb_emails_processed, the_id, the_dir))
-
+
#decode the labels that are received as utf7 => unicode
new_data[the_id][imap_utils.GIMAPFetcher.GMAIL_LABELS] = \
imap_utils.decode_labels(new_data[the_id][imap_utils.GIMAPFetcher.GMAIL_LABELS])
-
+
#pass the dir and the ID
curr_metadata = GMVaulter.check_email_on_disk( self.gstorer , \
new_data[the_id][imap_utils.GIMAPFetcher.GMAIL_ID], \
the_dir)
-
+
#if on disk check that the data is not different
if curr_metadata:
-
+
LOG.debug("metadata for %s already exists. Check if different." % (gid))
-
+
if self._metadata_needs_update(curr_metadata, new_data[the_id]):
-
+
LOG.debug("Email with imap id %s and gmail id %s has changed. Updated it." % (the_id, gid))
-
+
#restore everything at the moment
gid = self.gstorer.bury_metadata(new_data[the_id], local_dir = the_dir)
-
+
#update local index id gid => index per directory to be thought out
else:
LOG.debug("On disk metadata for %s is up to date." % (gid))
- else:
+ else:
try:
#get the data
LOG.debug("Get Data for %s." % (gid))
email_data = self.src.fetch(the_id, imap_utils.GIMAPFetcher.GET_DATA_ONLY )
-
+
new_data[the_id][imap_utils.GIMAPFetcher.EMAIL_BODY] = \
email_data[the_id][imap_utils.GIMAPFetcher.EMAIL_BODY]
-
- # store data on disk within year month dir
+
+ # store data on disk within year month dir
gid = self.gstorer.bury_email(new_data[the_id], local_dir = the_dir, compress = compress)
-
+
#update local index id gid => index per directory to be thought out
- LOG.debug("Create and store email with imap id %s, gmail id %s." % (the_id, gid))
- except Exception, error:
- handle_sync_imap_error(error, the_id, self.error_report, self.src) #do everything in this handler
-
+ LOG.debug("Create and store email with imap id %s, gmail id %s." % (the_id, gid))
+ except Exception as error:
+ handle_sync_imap_error(error, the_id, self.error_report, self.src) #do everything in this handler
+
nb_emails_processed += 1
-
+
#indicate every 50 messages the number of messages left to process
left_emails = (total_nb_emails_to_process - nb_emails_processed)
-
+
if (nb_emails_processed % 50) == 0 and (left_emails > 0):
elapsed = timer.elapsed() #elapsed time in seconds
LOG.critical("\n== Processed %d emails in %s. %d left to be stored (time estimate %s).==\n" % \
(nb_emails_processed, \
timer.seconds_to_human_time(elapsed), left_emails, \
timer.estimate_time_left(nb_emails_processed, elapsed, left_emails)))
-
+
# save id every 10 restored emails
if (nb_emails_processed % 10) == 0:
if gid:
@@ -755,69 +756,69 @@ def _sync_emails(self, imap_req, compress, restart):
else:
LOG.info("Could not process imap with id %s. Ignore it\n")
self.error_report['empty'].append((the_id, gid if gid else None))
-
+
to_fetch -= set(new_data.keys()) #remove all found keys from to_fetch set
-
+
for the_id in to_fetch:
# case when gmail IMAP server returns OK without any data whatsoever
# eg. imap uid 142221L ignore it
LOG.info("Could not process imap with id %s. Ignore it\n")
self.error_report['empty'].append((the_id, None))
-
+
LOG.critical("\nEmails synchronisation operation performed in %s.\n" % (timer.seconds_to_human_time(timer.elapsed())))
-
+
return imap_ids
-
+
def sync(self, imap_req = imap_utils.GIMAPFetcher.IMAP_ALL, compress_on_disk = True, \
db_cleaning = False, ownership_checking = True, \
restart = False, emails_only = False, chats_only = False):
"""
- sync mode
+ sync mode
"""
#check ownership to have one email per db unless user wants different
#save the owner if new
self._check_email_db_ownership(ownership_checking)
-
+
if not compress_on_disk:
LOG.critical("Disable compression when storing emails.")
-
+
if self.use_encryption:
LOG.critical("Encryption activated. All emails will be encrypted before to be stored.")
LOG.critical("Please take care of the encryption key stored in (%s) or all"\
" your stored emails will become unreadable." \
% (gmvault_db.GmailStorer.get_encryption_key_path(self.db_root_dir)))
-
+
self.timer.start() #start syncing emails
-
+
if not chats_only:
# backup emails
LOG.critical("Start emails synchronization.\n")
self._sync_emails(imap_req, compress = compress_on_disk, restart = restart)
else:
LOG.critical("Skip emails synchronization.\n")
-
+
if not emails_only:
# backup chats
LOG.critical("Start chats synchronization.\n")
self._sync_chats(imap_req, compress = compress_on_disk, restart = restart)
else:
LOG.critical("\nSkip chats synchronization.\n")
-
+
#delete supress emails from DB since last sync
if len(self.gstorer.get_db_owners()) <= 1:
self.check_clean_db(db_cleaning)
else:
LOG.critical("Deactivate database cleaning on a multi-owners Gmvault db.")
-
+
LOG.critical("Synchronisation operation performed in %s.\n" \
% (self.timer.seconds_to_human_time(self.timer.elapsed())))
-
+
#update number of reconnections
self.error_report["reconnections"] = self.src.total_nb_reconns
-
+
return self.error_report
-
+
def _delete_sync(self, imap_ids, db_gmail_ids, db_gmail_ids_info, msg_type):
"""
Delete emails from the database if necessary
@@ -825,76 +826,76 @@ def _delete_sync(self, imap_ids, db_gmail_ids, db_gmail_ids_info, msg_type):
db_gmail_ids_info : info read from metadata
msg_type : email or chat
"""
-
+
# optimize nb of items
nb_items = self.NB_GRP_OF_ITEMS if len(imap_ids) >= self.NB_GRP_OF_ITEMS else len(imap_ids)
-
+
LOG.critical("Call Gmail to check the stored %ss against the Gmail %ss ids and see which ones have been deleted.\n\n"\
- "This might take a few minutes ...\n" % (msg_type, msg_type))
-
+ "This might take a few minutes ...\n" % (msg_type, msg_type))
+
#calculate the list elements to delete
#query nb_items items in one query to minimise number of imap queries
for group_imap_id in itertools.izip_longest(fillvalue=None, *[iter(imap_ids)]*nb_items):
-
+
# if None in list remove it
- if None in group_imap_id:
+ if None in group_imap_id:
group_imap_id = [ im_id for im_id in group_imap_id if im_id != None ]
-
+
data = self.src.fetch(group_imap_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
-
+
# syntax for 2.7 set comprehension { data[key][imap_utils.GIMAPFetcher.GMAIL_ID] for key in data }
# need to create a list for 2.6
db_gmail_ids.difference_update([data[key][imap_utils.GIMAPFetcher.GMAIL_ID] for key in data ])
-
+
if len(db_gmail_ids) == 0:
break
-
+
LOG.critical("Will delete %s %s(s) from gmvault db.\n" % (len(db_gmail_ids), msg_type) )
for gm_id in db_gmail_ids:
LOG.critical("gm_id %s not in the Gmail server. Delete it." % (gm_id))
self.gstorer.delete_emails([(gm_id, db_gmail_ids_info[gm_id])], msg_type)
-
+
def get_gmails_ids_left_to_sync(self, op_type, imap_ids):
"""
Get the ids that still needs to be sync
Return a list of ids
"""
-
+
filename = self.OP_TO_FILENAME.get(op_type, None)
-
+
if not filename:
raise Exception("Bad Operation (%s) in save_last_id. "\
"This should not happen, send the error to the software developers." % (op_type))
-
+
filepath = '%s/%s_%s' % (self.gstorer.get_info_dir(), self.login, filename)
-
+
if not os.path.exists(filepath):
LOG.critical("last_id.sync file %s doesn't exist.\nSync the full list of backed up emails." %(filepath))
return imap_ids
-
+
json_obj = json.load(open(filepath, 'r'))
-
+
last_id = json_obj['last_id']
-
+
last_id_index = -1
-
+
new_gmail_ids = imap_ids
-
+
try:
#get imap_id from stored gmail_id
dummy = self.src.search({'type':'imap', 'req':'X-GM-MSGID %s' % (last_id)})
-
+
imap_id = dummy[0]
last_id_index = imap_ids.index(imap_id)
LOG.critical("Restart from gmail id %s (imap id %s)." % (last_id, imap_id))
- new_gmail_ids = imap_ids[last_id_index:]
- except Exception, _: #ignore any exception and try to get all ids in case of problems. pylint:disable=W0703
+ new_gmail_ids = imap_ids[last_id_index:]
+ except Exception as _: #ignore any exception and try to get all ids in case of problems. pylint:disable=W0703
#element not in keys return current set of keys
LOG.critical("Error: Cannot restore from last restore gmail id. It is not in Gmail."\
" Sync the complete list of gmail ids requested from Gmail.")
-
+
return new_gmail_ids
-
+
def check_clean_db(self, db_cleaning):
"""
Check and clean the database (remove file that are not anymore in Gmail
@@ -909,60 +910,60 @@ def check_clean_db(self, db_cleaning):
return
else:
LOG.critical("Look for emails/chats that are in the Gmvault db but not in Gmail servers anymore.\n")
-
+
#get gmail_ids from db
LOG.critical("Read all gmail ids from the Gmvault db. It might take a bit of time ...\n")
-
+
timer = gmvault_utils.Timer() # needed for enhancing the user information
timer.start()
-
+
db_gmail_ids_info = self.gstorer.get_all_existing_gmail_ids()
-
+
LOG.critical("Found %s email(s) in the Gmvault db.\n" % (len(db_gmail_ids_info)) )
-
+
#create a set of keys
db_gmail_ids = set(db_gmail_ids_info.keys())
-
+
# get all imap ids in All Mail
self.src.select_folder('ALLMAIL') #go to all mail
imap_ids = self.src.search(imap_utils.GIMAPFetcher.IMAP_ALL) #search all
-
+
LOG.debug("Got %s emails imap_id(s) from the Gmail Server." % (len(imap_ids)))
-
+
#delete supress emails from DB since last sync
self._delete_sync(imap_ids, db_gmail_ids, db_gmail_ids_info, 'email')
-
+
# get all chats ids
if self.src.is_visible('CHATS'):
-
+
db_gmail_ids_info = self.gstorer.get_all_chats_gmail_ids()
-
+
LOG.critical("Found %s chat(s) in the Gmvault db.\n" % (len(db_gmail_ids_info)) )
-
+
self.src.select_folder('CHATS') #go to chats
chat_ids = self.src.search(imap_utils.GIMAPFetcher.IMAP_ALL)
-
+
db_chat_ids = set(db_gmail_ids_info.keys())
-
+
LOG.debug("Got %s chat imap_ids from the Gmail Server." % (len(chat_ids)))
-
+
#delete supress emails from DB since last sync
self._delete_sync(chat_ids, db_chat_ids, db_gmail_ids_info , 'chat')
else:
LOG.critical("Chats IMAP Directory not visible on Gmail. Ignore deletion of chats.")
-
-
+
+
LOG.critical("\nDeletion checkup done in %s." % (timer.elapsed_human_time()))
-
-
+
+
def remote_sync(self):
"""
Sync with a remote source (IMAP mirror or cloud storage area)
"""
- #sync remotely
+ #sync remotely
pass
-
-
+
+
def save_lastid(self, op_type, gm_id):
"""
Save the passed gmid in last_id.restore
@@ -975,8 +976,8 @@ def save_lastid(self, op_type, gm_id):
raise Exception("Bad Operation (%s) in save_last_id. "
"This should not happen, send the error to the "
"software developers." % op_type)
-
- #filepath = '%s/%s_%s' % (gmvault_utils.get_home_dir_path(), self.login, filename)
+
+ #filepath = '%s/%s_%s' % (gmvault_utils.get_home_dir_path(), self.login, filename)
filepath = '%s/%s_%s' % (self.gstorer.get_info_dir(), self.login,
filename)
@@ -997,153 +998,153 @@ def get_gmails_ids_left_to_restore(self, op_type, db_gmail_ids_info):
#filepath = '%s/%s_%s' % (gmvault_utils.get_home_dir_path(), self.login, filename)
filepath = '%s/%s_%s' % (self.gstorer.get_info_dir(), self.login, filename)
-
+
if not os.path.exists(filepath):
LOG.critical("last_id restore file %s doesn't exist.\nRestore the full list of backed up emails." %(filepath))
return db_gmail_ids_info
-
+
json_obj = json.load(open(filepath, 'r'))
-
+
last_id = json_obj['last_id']
-
+
last_id_index = -1
try:
- keys = db_gmail_ids_info.keys()
+ keys = list(db_gmail_ids_info.keys())
last_id_index = keys.index(last_id)
LOG.critical("Restart from gmail id %s." % (last_id))
- except ValueError, _:
+ except ValueError as _:
#element not in keys return current set of keys
LOG.error("Cannot restore from last restore gmail id. It is not in the disk database.")
-
+
new_gmail_ids_info = collections_utils.OrderedDict()
if last_id_index != -1:
- for key in db_gmail_ids_info.keys()[last_id_index+1:]:
+ for key in list(db_gmail_ids_info.keys())[last_id_index+1:]:
new_gmail_ids_info[key] = db_gmail_ids_info[key]
else:
- new_gmail_ids_info = db_gmail_ids_info
-
- return new_gmail_ids_info
-
+ new_gmail_ids_info = db_gmail_ids_info
+
+ return new_gmail_ids_info
+
def restore(self, pivot_dir = None, extra_labels = [], \
restart = False, emails_only = False, chats_only = False): #pylint:disable=W0102
"""
Restore emails in a gmail account
"""
self.timer.start() #start restoring
-
+
#self.src.select_folder('ALLMAIL') #insure that Gmvault is in ALLMAIL
-
+
if not chats_only:
# backup emails
LOG.critical("Start emails restoration.\n")
-
+
if pivot_dir:
LOG.critical("Quick mode activated. Will only restore all emails since %s.\n" % (pivot_dir))
-
+
self.restore_emails(pivot_dir, extra_labels, restart)
else:
LOG.critical("Skip emails restoration.\n")
-
+
if not emails_only:
# backup chats
LOG.critical("Start chats restoration.\n")
self.restore_chats(extra_labels, restart)
else:
LOG.critical("Skip chats restoration.\n")
-
+
LOG.critical("Restore operation performed in %s.\n" \
% (self.timer.seconds_to_human_time(self.timer.elapsed())))
-
+
#update number of reconnections
self.error_report["reconnections"] = self.src.total_nb_reconns
-
+
return self.error_report
-
+
def restore_chats(self, extra_labels = [], restart = False): #pylint:disable=W0102
"""
restore chats
"""
- LOG.critical("Restore chats in gmail account %s." % (self.login) )
-
+ LOG.critical("Restore chats in gmail account %s." % (self.login) )
+
LOG.critical("Read chats info from %s gmvault-db." % (self.db_root_dir))
-
+
#get gmail_ids from db
db_gmail_ids_info = self.gstorer.get_all_chats_gmail_ids()
-
- LOG.critical("Total number of chats to restore %s." % (len(db_gmail_ids_info.keys())))
-
+
+ LOG.critical("Total number of chats to restore %s." % (len(list(db_gmail_ids_info.keys()))))
+
if restart:
db_gmail_ids_info = self.get_gmails_ids_left_to_restore(self.OP_CHAT_RESTORE, db_gmail_ids_info)
-
+
total_nb_emails_to_restore = len(db_gmail_ids_info)
LOG.critical("Got all chats id left to restore. Still %s chats to do.\n" % (total_nb_emails_to_restore) )
-
+
existing_labels = set() #set of existing labels to not call create_gmail_labels all the time
nb_emails_restored = 0 #to count nb of emails restored
labels_to_apply = collections_utils.SetMultimap()
#get all mail folder name
all_mail_name = self.src.get_folder_name("ALLMAIL")
-
+
# go to DRAFTS folder because if you are in ALL MAIL when uploading emails it is very slow
folder_def_location = gmvault_utils.get_conf_defaults().get("General", "restore_default_location", "DRAFTS")
self.src.select_folder(folder_def_location)
-
+
timer = gmvault_utils.Timer() # local timer for restore emails
timer.start()
-
- nb_items = gmvault_utils.get_conf_defaults().get_int("General", "nb_messages_per_restore_batch", 100)
-
- for group_imap_ids in itertools.izip_longest(fillvalue=None, *[iter(db_gmail_ids_info)]*nb_items):
+
+ nb_items = gmvault_utils.get_conf_defaults().get_int("General", "nb_messages_per_restore_batch", 100)
+
+ for group_imap_ids in itertools.zip_longest(fillvalue=None, *[iter(db_gmail_ids_info)]*nb_items):
last_id = group_imap_ids[-1] #will be used to save the last id
#remove all None elements from group_imap_ids
group_imap_ids = itertools.ifilter(lambda x: x != None, group_imap_ids)
-
+
labels_to_create = set() #create label set
labels_to_create.update(extra_labels) # add extra labels to applied to all emails
-
+
LOG.critical("Processing next batch of %s chats.\n" % (nb_items))
-
+
# unbury the metadata for all these emails
- for gm_id in group_imap_ids:
+ for gm_id in group_imap_ids:
email_meta, email_data = self.gstorer.unbury_email(gm_id)
-
+
LOG.critical("Pushing chat content with id %s." % (gm_id))
LOG.debug("Subject = %s." % (email_meta[self.gstorer.SUBJECT_K]))
try:
# push data in gmail account and get uids
imap_id = self.src.push_data(all_mail_name, email_data, \
email_meta[self.gstorer.FLAGS_K] , \
- email_meta[self.gstorer.INT_DATE_K] )
-
+ email_meta[self.gstorer.INT_DATE_K] )
+
#labels for this email => real_labels U extra_labels
labels = set(email_meta[self.gstorer.LABELS_K])
-
+
# add in the labels_to_create struct
for label in labels:
LOG.debug("label = %s\n" % (label))
labels_to_apply[str(label)] = imap_id
-
+
# get list of labels to create (do a union with labels to create)
- labels_to_create.update([ label for label in labels if label not in existing_labels])
-
- except Exception, err:
+ labels_to_create.update([ label for label in labels if label not in existing_labels])
+
+ except Exception as err:
handle_restore_imap_error(err, gm_id, db_gmail_ids_info, self)
#create the non existing labels and update existing labels
if len(labels_to_create) > 0:
LOG.debug("Labels creation tentative for chats ids %s." % (group_imap_ids))
existing_labels = self.src.create_gmail_labels(labels_to_create, existing_labels)
-
+
# associate labels with emails
LOG.critical("Applying labels to the current batch of chats.")
try:
LOG.debug("Changing directory. Going into ALLMAIL")
self.src.select_folder('ALLMAIL') #go to ALL MAIL to make STORE usable
for label in labels_to_apply.keys():
- self.src.apply_labels_to(labels_to_apply[label], [label])
- except Exception, err:
+ self.src.apply_labels_to(labels_to_apply[label], [label])
+ except Exception as err:
LOG.error("Problem when applying labels %s to the following ids: %s" %(label, labels_to_apply[label]), err)
if isinstance(err, imaplib.IMAP4.abort) and str(err).find("=> Gmvault ssl socket error: EOF") >= 0:
# if this is a Gmvault SSL Socket error ignore labelling and continue the restore
@@ -1155,25 +1156,25 @@ def restore_chats(self, extra_labels = [], restart = False): #pylint:disable=W01
finally:
self.src.select_folder(folder_def_location) # go back to an empty DIR (Drafts) to be fast
labels_to_apply = collections_utils.SetMultimap() #reset label to apply
-
+
nb_emails_restored += nb_items
-
+
#indicate every 10 messages the number of messages left to process
left_emails = (total_nb_emails_to_restore - nb_emails_restored)
-
- if (left_emails > 0):
+
+ if (left_emails > 0):
elapsed = timer.elapsed() #elapsed time in seconds
LOG.critical("\n== Processed %d chats in %s. %d left to be restored "\
"(time estimate %s).==\n" % \
(nb_emails_restored, timer.seconds_to_human_time(elapsed), \
left_emails, timer.estimate_time_left(nb_emails_restored, elapsed, left_emails)))
-
+
# save id every nb_items restored emails
# add the last treated gm_id
self.save_lastid(self.OP_EMAIL_RESTORE, last_id)
-
- return self.error_report
-
+
+ return self.error_report
+
def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
"""
restore emails in a gmail account using batching to group restore
@@ -1183,82 +1184,82 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
The idea is to get a batch of 50 emails and push them all in the mailbox one by one and get the uid for each of them.
Then create a dict of labels => uid_list and for each label send a unique store command after having changed dir
"""
- LOG.critical("Restore emails in gmail account %s." % (self.login) )
-
+ LOG.critical("Restore emails in gmail account %s." % (self.login) )
+
LOG.critical("Read email info from %s gmvault-db." % (self.db_root_dir))
-
+
#get gmail_ids from db
db_gmail_ids_info = self.gstorer.get_all_existing_gmail_ids(pivot_dir)
-
- LOG.critical("Total number of elements to restore %s." % (len(db_gmail_ids_info.keys())))
-
+
+ LOG.critical("Total number of elements to restore %s." % (len(list(db_gmail_ids_info.keys()))))
+
if restart:
db_gmail_ids_info = self.get_gmails_ids_left_to_restore(self.OP_EMAIL_RESTORE, db_gmail_ids_info)
-
+
total_nb_emails_to_restore = len(db_gmail_ids_info)
-
+
LOG.critical("Got all emails id left to restore. Still %s emails to do.\n" % (total_nb_emails_to_restore) )
-
+
existing_labels = set() #set of existing labels to not call create_gmail_labels all the time
nb_emails_restored = 0 #to count nb of emails restored
labels_to_apply = collections_utils.SetMultimap()
#get all mail folder name
all_mail_name = self.src.get_folder_name("ALLMAIL")
-
+
# go to DRAFTS folder because if you are in ALL MAIL when uploading emails it is very slow
folder_def_location = gmvault_utils.get_conf_defaults().get("General", "restore_default_location", "DRAFTS")
self.src.select_folder(folder_def_location)
-
+
timer = gmvault_utils.Timer() # local timer for restore emails
timer.start()
-
- nb_items = gmvault_utils.get_conf_defaults().get_int("General", "nb_messages_per_restore_batch", 80)
-
- for group_imap_ids in itertools.izip_longest(fillvalue=None, *[iter(db_gmail_ids_info)]*nb_items):
-
+
+ nb_items = gmvault_utils.get_conf_defaults().get_int("General", "nb_messages_per_restore_batch", 80)
+
+ for group_imap_ids in itertools.zip_longest(fillvalue=None, *[iter(db_gmail_ids_info)]*nb_items):
+
last_id = group_imap_ids[-1] #will be used to save the last id
#remove all None elements from group_imap_ids
group_imap_ids = itertools.ifilter(lambda x: x != None, group_imap_ids)
-
+
labels_to_create = set() #create label set
labels_to_create.update(extra_labels) # add extra labels to applied to all emails
-
+
LOG.critical("Processing next batch of %s emails.\n" % (nb_items))
-
+
# unbury the metadata for all these emails
- for gm_id in group_imap_ids:
+ for gm_id in group_imap_ids:
email_meta, email_data = self.gstorer.unbury_email(gm_id)
-
+
LOG.critical("Pushing email body with id %s." % (gm_id))
LOG.debug("Subject = %s." % (email_meta[self.gstorer.SUBJECT_K]))
try:
# push data in gmail account and get uids
imap_id = self.src.push_data(all_mail_name, email_data, \
email_meta[self.gstorer.FLAGS_K] , \
- email_meta[self.gstorer.INT_DATE_K] )
-
+ email_meta[self.gstorer.INT_DATE_K] )
+
#labels for this email => real_labels U extra_labels
labels = set(email_meta[self.gstorer.LABELS_K])
-
+
# add in the labels_to_create struct
for label in labels:
if label != "\\Starred":
#LOG.debug("label = %s\n" % (label.encode('utf-8')))
LOG.debug("label = %s\n" % (label))
labels_to_apply[label] = imap_id
-
+
# get list of labels to create (do a union with labels to create)
- labels_to_create.update([ label for label in labels if label not in existing_labels])
-
- except Exception, err:
+ labels_to_create.update([ label for label in labels if label not in existing_labels])
+
+ except Exception as err:
handle_restore_imap_error(err, gm_id, db_gmail_ids_info, self)
#create the non existing labels and update existing labels
if len(labels_to_create) > 0:
LOG.debug("Labels creation tentative for emails with ids %s." % (group_imap_ids))
existing_labels = self.src.create_gmail_labels(labels_to_create, existing_labels)
-
+
# associate labels with emails
LOG.critical("Applying labels to the current batch of emails.")
try:
@@ -1268,8 +1269,8 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
self.src.select_folder('ALLMAIL') #go to ALL MAIL to make STORE usable
LOG.debug("Changed dir. Operation time = %s ms" % (the_timer.elapsed_ms()))
for label in labels_to_apply.keys():
- self.src.apply_labels_to(labels_to_apply[label], [label])
- except Exception, err:
+ self.src.apply_labels_to(labels_to_apply[label], [label])
+ except Exception as err:
LOG.error("Problem when applying labels %s to the following ids: %s" %(label, labels_to_apply[label]), err)
LOG.error("Problem when applying labels.", err)
if isinstance(err, imaplib.IMAP4.abort) and str(err).find("=> Gmvault ssl socket error: EOF") >= 0:
@@ -1282,22 +1283,22 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
finally:
self.src.select_folder(folder_def_location) # go back to an empty DIR (Drafts) to be fast
labels_to_apply = collections_utils.SetMultimap() #reset label to apply
-
+
nb_emails_restored += nb_items
-
+
#indicate every 10 messages the number of messages left to process
left_emails = (total_nb_emails_to_restore - nb_emails_restored)
-
- if (left_emails > 0):
+
+ if (left_emails > 0):
elapsed = timer.elapsed() #elapsed time in seconds
LOG.critical("\n== Processed %d emails in %s. %d left to be restored "\
"(time estimate %s).==\n" % \
(nb_emails_restored, timer.seconds_to_human_time(elapsed), \
left_emails, timer.estimate_time_left(nb_emails_restored, elapsed, left_emails)))
-
+
# save id every 50 restored emails
# add the last treated gm_id
self.save_lastid(self.OP_EMAIL_RESTORE, last_id)
-
- return self.error_report
-
+
+ return self.error_report
+
diff --git a/src/sandbox/gmvault_multiprocess.py b/src/sandbox/gmvault_multiprocess.py
index 34c38bf4..3bad0137 100755
--- a/src/sandbox/gmvault_multiprocess.py
+++ b/src/sandbox/gmvault_multiprocess.py
@@ -16,6 +16,7 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import
import json
import time
import datetime
@@ -94,7 +95,7 @@ def handle_sync_imap_error(the_exception, the_id, error_report, src):
try:
#try to get the gmail_id
curr = src.fetch(the_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
- except Exception, _: #pylint:disable-msg=W0703
+ except Exception as _: #pylint:disable-msg=W0703
curr = None
LOG.critical("Error when trying to get gmail id for message with imap id %s." % (the_id))
LOG.critical("Disconnect, wait for 20 sec then reconnect.")
@@ -128,7 +129,7 @@ def handle_sync_imap_error(the_exception, the_id, error_report, src):
#try to get the gmail_id
LOG.critical("One more attempt. Trying to fetch the Gmail ID for %s" % (the_id) )
curr = src.fetch(the_id, imap_utils.GIMAPFetcher.GET_GMAIL_ID)
- except Exception, _: #pylint:disable-msg=W0703
+ except Exception as _: #pylint:disable-msg=W0703
curr = None
if curr:
@@ -174,7 +175,7 @@ def individual_fetch(self, imap_ids):
single_data = self.src.fetch(the_id, self.request)
new_data.update(single_data)
- except Exception, error:
+ except Exception as error:
handle_sync_imap_error(error, the_id, self.error_report, self.src) #do everything in this handler
return new_data
@@ -200,7 +201,7 @@ def next(self):
return new_data
- except imaplib.IMAP4.error, _:
+ except imaplib.IMAP4.error as _:
new_data = self.individual_fetch(batch)
return new_data
@@ -339,7 +340,7 @@ def check_email_on_disk(cls, a_gstorer, a_id, a_dir = None):
if a_dir:
return a_gstorer.unbury_metadata(a_id, a_dir)
- except ValueError, json_error:
+ except ValueError as json_error:
LOG.exception("Cannot read file %s. Try to fetch the data again" % ('%s.meta' % (a_id)), json_error )
return None
@@ -492,7 +493,7 @@ def _sync_chats(self, imap_req, compress, restart):
#update local index id gid => index per directory to be thought out
LOG.debug("Create and store chat with imap id %s, gmail id %s." % (the_id, gid))
- except Exception, error:
+ except Exception as error:
handle_sync_imap_error(error, the_id, self.error_report, self.src) #do everything in this handler
nb_chats_processed += 1
@@ -605,7 +606,7 @@ def _sync_emails(self, imap_req, compress, restart):
#update local index id gid => index per directory to be thought out
LOG.debug("Create and store email with imap id %s, gmail id %s." % (the_id, gid))
- except Exception, error:
+ except Exception as error:
handle_sync_imap_error(error, the_id, self.error_report, self.src) #do everything in this handler
nb_emails_processed += 1
@@ -758,7 +759,7 @@ def get_gmails_ids_left_to_sync(self, op_type, imap_ids):
last_id_index = imap_ids.index(imap_id)
LOG.critical("Restart from gmail id %s (imap id %s)." % (last_id, imap_id))
new_gmail_ids = imap_ids[last_id_index:]
- except Exception, _: #ignore any exception and try to get all ids in case of problems. pylint:disable=W0703
+ except Exception as _: #ignore any exception and try to get all ids in case of problems. pylint:disable=W0703
#element not in keys return current set of keys
LOG.critical("Error: Cannot restore from last restore gmail id. It is not in Gmail."\
" Sync the complete list of gmail ids requested from Gmail.")
@@ -871,16 +872,16 @@ def get_gmails_ids_left_to_restore(self, op_type, db_gmail_ids_info):
last_id_index = -1
try:
- keys = db_gmail_ids_info.keys()
+ keys = list(db_gmail_ids_info.keys())
last_id_index = keys.index(last_id)
LOG.critical("Restart from gmail id %s." % (last_id))
- except ValueError, _:
+ except ValueError as _:
#element not in keys return current set of keys
LOG.error("Cannot restore from last restore gmail id. It is not in the disk database.")
new_gmail_ids_info = collections_utils.OrderedDict()
if last_id_index != -1:
- for key in db_gmail_ids_info.keys()[last_id_index+1:]:
+ for key in list(db_gmail_ids_info.keys())[last_id_index+1:]:
new_gmail_ids_info[key] = db_gmail_ids_info[key]
else:
new_gmail_ids_info = db_gmail_ids_info
@@ -936,7 +937,7 @@ def common_restore(self, the_type, db_gmail_ids_info, extra_labels = [], restart
LOG.critical("Read %s info from %s gmvault-db." % (msg, self.db_root_dir))
- LOG.critical("Total number of %s to restore %s." % (msg, len(db_gmail_ids_info.keys())))
+ LOG.critical("Total number of %s to restore %s." % (msg, len(list(db_gmail_ids_info.keys()))))
if restart:
db_gmail_ids_info = self.get_gmails_ids_left_to_restore(op, db_gmail_ids_info)
@@ -993,7 +994,7 @@ def common_restore(self, the_type, db_gmail_ids_info, extra_labels = [], restart
if (nb_emails_restored % 10) == 0:
self.save_lastid(self.OP_CHAT_RESTORE, gm_id)
- except imaplib.IMAP4.abort, abort:
+ except imaplib.IMAP4.abort as abort:
# if this is a Gmvault SSL Socket error quarantine the email and continue the restore
if str(abort).find("=> Gmvault ssl socket error: EOF") >= 0:
@@ -1006,7 +1007,7 @@ def common_restore(self, the_type, db_gmail_ids_info, extra_labels = [], restart
else:
raise abort
- except imaplib.IMAP4.error, err:
+ except imaplib.IMAP4.error as err:
LOG.error("Catched IMAP Error %s" % (str(err)))
LOG.exception(err)
@@ -1020,7 +1021,7 @@ def common_restore(self, the_type, db_gmail_ids_info, extra_labels = [], restart
self.error_report['emails_in_quarantine'].append(gm_id)
else:
raise err
- except imap_utils.PushEmailError, p_err:
+ except imap_utils.PushEmailError as p_err:
LOG.error("Catch the following exception %s" % (str(p_err)))
LOG.exception(p_err)
@@ -1031,7 +1032,7 @@ def common_restore(self, the_type, db_gmail_ids_info, extra_labels = [], restart
self.error_report['emails_in_quarantine'].append(gm_id)
else:
raise p_err
- except Exception, err:
+ except Exception as err:
LOG.error("Catch the following exception %s" % (str(err)))
LOG.exception(err)
raise err
@@ -1052,7 +1053,7 @@ def old_restore_chats(self, extra_labels = [], restart = False): #pylint:disable
#get gmail_ids from db
db_gmail_ids_info = self.gstorer.get_all_chats_gmail_ids()
- LOG.critical("Total number of chats to restore %s." % (len(db_gmail_ids_info.keys())))
+ LOG.critical("Total number of chats to restore %s." % (len(list(db_gmail_ids_info.keys()))))
if restart:
db_gmail_ids_info = self.get_gmails_ids_left_to_restore(self.OP_CHAT_RESTORE, db_gmail_ids_info)
@@ -1109,7 +1110,7 @@ def old_restore_chats(self, extra_labels = [], restart = False): #pylint:disable
if (nb_emails_restored % 10) == 0:
self.save_lastid(self.OP_CHAT_RESTORE, gm_id)
- except imaplib.IMAP4.abort, abort:
+ except imaplib.IMAP4.abort as abort:
# if this is a Gmvault SSL Socket error quarantine the email and continue the restore
if str(abort).find("=> Gmvault ssl socket error: EOF") >= 0:
@@ -1122,7 +1123,7 @@ def old_restore_chats(self, extra_labels = [], restart = False): #pylint:disable
else:
raise abort
- except imaplib.IMAP4.error, err:
+ except imaplib.IMAP4.error as err:
LOG.error("Catched IMAP Error %s" % (str(err)))
LOG.exception(err)
@@ -1136,7 +1137,7 @@ def old_restore_chats(self, extra_labels = [], restart = False): #pylint:disable
self.error_report['emails_in_quarantine'].append(gm_id)
else:
raise err
- except imap_utils.PushEmailError, p_err:
+ except imap_utils.PushEmailError as p_err:
LOG.error("Catch the following exception %s" % (str(p_err)))
LOG.exception(p_err)
@@ -1147,7 +1148,7 @@ def old_restore_chats(self, extra_labels = [], restart = False): #pylint:disable
self.error_report['emails_in_quarantine'].append(gm_id)
else:
raise p_err
- except Exception, err:
+ except Exception as err:
LOG.error("Catch the following exception %s" % (str(err)))
LOG.exception(err)
raise err
@@ -1169,7 +1170,7 @@ def restore_chats(self, extra_labels = [], restart = False): #pylint:disable=W01
#get gmail_ids from db
db_gmail_ids_info = self.gstorer.get_all_chats_gmail_ids()
- LOG.critical("Total number of chats to restore %s." % (len(db_gmail_ids_info.keys())))
+ LOG.critical("Total number of chats to restore %s." % (len(list(db_gmail_ids_info.keys()))))
if restart:
db_gmail_ids_info = self.get_gmails_ids_left_to_restore(self.OP_CHAT_RESTORE, db_gmail_ids_info)
@@ -1227,7 +1228,7 @@ def restore_chats(self, extra_labels = [], restart = False): #pylint:disable=W01
# get list of labels to create (do a union with labels to create)
labels_to_create.update([ label for label in labels if label not in existing_labels])
- except Exception, err:
+ except Exception as err:
handle_restore_imap_error(err, gm_id, db_gmail_ids_info, self)
#create the non existing labels and update existing labels
@@ -1242,7 +1243,7 @@ def restore_chats(self, extra_labels = [], restart = False): #pylint:disable=W01
self.src.select_folder('ALLMAIL') #go to ALL MAIL to make STORE usable
for label in labels_to_apply.keys():
self.src.apply_labels_to(labels_to_apply[label], [label])
- except Exception, err:
+ except Exception as err:
LOG.error("Problem when applying labels %s to the following ids: %s" %(label, labels_to_apply[label]), err)
if isinstance(err, imaplib.IMAP4.abort) and str(err).find("=> Gmvault ssl socket error: EOF") >= 0:
# if this is a Gmvault SSL Socket error quarantine the email and continue the restore
@@ -1292,7 +1293,7 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
#get gmail_ids from db
db_gmail_ids_info = self.gstorer.get_all_existing_gmail_ids(pivot_dir)
- LOG.critical("Total number of elements to restore %s." % (len(db_gmail_ids_info.keys())))
+ LOG.critical("Total number of elements to restore %s." % (len(list(db_gmail_ids_info.keys()))))
if restart:
db_gmail_ids_info = self.get_gmails_ids_left_to_restore(self.OP_EMAIL_RESTORE, db_gmail_ids_info)
@@ -1351,7 +1352,7 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
# get list of labels to create (do a union with labels to create)
labels_to_create.update([ label for label in labels if label not in existing_labels])
- except Exception, err:
+ except Exception as err:
handle_restore_imap_error(err, gm_id, db_gmail_ids_info, self)
#create the non existing labels and update existing labels
@@ -1369,7 +1370,7 @@ def restore_emails(self, pivot_dir = None, extra_labels = [], restart = False):
LOG.debug("Changed dir. Operation time = %s ms" % (t.elapsed_ms()))
for label in labels_to_apply.keys():
self.src.apply_labels_to(labels_to_apply[label], [label])
- except Exception, err:
+ except Exception as err:
LOG.error("Problem when applying labels %s to the following ids: %s" %(label, labels_to_apply[label]), err)
if isinstance(err, imaplib.IMAP4.abort) and str(err).find("=> Gmvault ssl socket error: EOF") >= 0:
# if this is a Gmvault SSL Socket error quarantine the email and continue the restore
@@ -1419,7 +1420,7 @@ def old_restore_emails(self, pivot_dir = None, extra_labels = [], restart = Fals
#get gmail_ids from db
db_gmail_ids_info = self.gstorer.get_all_existing_gmail_ids(pivot_dir)
- LOG.critical("Total number of elements to restore %s." % (len(db_gmail_ids_info.keys())))
+ LOG.critical("Total number of elements to restore %s." % (len(list(db_gmail_ids_info.keys()))))
if restart:
db_gmail_ids_info = self.get_gmails_ids_left_to_restore(self.OP_EMAIL_RESTORE, db_gmail_ids_info)
@@ -1490,7 +1491,7 @@ def old_restore_emails(self, pivot_dir = None, extra_labels = [], restart = Fals
# get list of labels to create (do a union with labels to create)
labels_to_create.update([ label for label in labels if label not in existing_labels])
- except Exception, err:
+ except Exception as err:
handle_restore_imap_error(err, gm_id, db_gmail_ids_info, self)
#create the non existing labels and update existing labels
@@ -1582,7 +1583,7 @@ def run(self):
for label in labels_to_apply.keys():
LOG.critical("Apply %s to %s" % (label, labels_to_apply[label]))
self.src.apply_labels_to(labels_to_apply[label], [label])
- except Exception, err:
+ except Exception as err:
LOG.error("Problem when applying labels %s to the following ids: %s" %(label, labels_to_apply[label]), err)
finally:
#self.queue.task_done()
diff --git a/src/sandbox/json_tests.py b/src/sandbox/json_tests.py
index a83cb13c..a7fa8a6f 100644
--- a/src/sandbox/json_tests.py
+++ b/src/sandbox/json_tests.py
@@ -4,7 +4,10 @@
@author: aubert
"""
+from __future__ import absolute_import, print_function
+
import json
+import six
string_to_test = u"Чаты"
labels = [ 0, string_to_test ]
@@ -30,7 +33,7 @@ def format(self, record):
pass
else:
break
-
+
if self.formatter is None:
return record.message
return self.formatter(record, self)
@@ -40,62 +43,62 @@ def data_to_test():
data to test
"""
meta_obj = { 'labels' : labels }
-
+
with open("/tmp/test.json", 'w') as f:
json.dump(meta_obj, f)
print("Data stored")
-
+
with open("/tmp/test.json") as f:
metadata = json.load(f)
-
+
new_labels = []
-
+
for label in metadata['labels']:
- if isinstance(label, (int, long, float, complex)):
- label = unicode(str(label))
-
+ if isinstance(label, (int, float, complex)):
+ label = six.text_type(str(label))
+
new_labels.append(label)
-
+
metadata['labels'] = new_labels
-
- print("metadata = %s\n" % metadata)
-
- print("type(metadata['labels'][0]) = %s" % (type(metadata['labels'][0])))
-
- print("metadata['labels'][0] = %s" % (metadata['labels'][0]))
-
- print("type(metadata['labels'][1]) = %s" % (type(metadata['labels'][1])))
-
- print("metadata['labels'][1] = %s" % (metadata['labels'][1]))
-
+
+ print(("metadata = %s\n" % metadata))
+
+ print(("type(metadata['labels'][0]) = %s" % (type(metadata['labels'][0]))))
+
+ print(("metadata['labels'][0] = %s" % (metadata['labels'][0])))
+
+ print(("type(metadata['labels'][1]) = %s" % (type(metadata['labels'][1]))))
+
+ print(("metadata['labels'][1] = %s" % (metadata['labels'][1])))
+
def header_regexpr_test():
"""
-
- """
+
+ """
#the_str = 'X-Gmail-Received: cef1a177794b2b6282967d22bcc2b6f49447a70d\r\nMessage-ID: <8b230a7105082305316d9c1a54@mail.gmail.com>\r\nSubject: Hessian ssl\r\n\r\n'
the_str = 'Message-ID: <8b230a7105082305316d9c1a54@mail.gmail.com>\r\nX-Gmail-Received: cef1a177794b2b6282967d22bcc2b6f49447a70d\r\nSubject: Hessian ssl\r\n\r\n'
-
-
+
+
import gmv.gmvault_db as gmvault_db
-
+
matched = gmvault_db.GmailStorer.HF_SUB_RE.search(the_str)
if matched:
subject = matched.group('subject')
- print("subject matched = <%s>\n" % (subject))
-
+ print(("subject matched = <%s>\n" % (subject)))
+
# look for a msg id
matched = gmvault_db.GmailStorer.HF_MSGID_RE.search(the_str)
if matched:
msgid = matched.group('msgid')
- print("msgid matched = <%s>\n" % (msgid))
+ print(("msgid matched = <%s>\n" % (msgid)))
+
-
matched = gmvault_db.GmailStorer.HF_XGMAIL_RECV_RE.search(the_str)
if matched:
received = matched.group('received').strip()
- print("matched = <%s>\n" % (received))
+ print(("matched = <%s>\n" % (received)))
if __name__ == '__main__':
header_regexpr_test()
diff --git a/src/sandbox/oauth2.py b/src/sandbox/oauth2.py
index 89bdc9cd..3e401b3a 100644
--- a/src/sandbox/oauth2.py
+++ b/src/sandbox/oauth2.py
@@ -60,6 +60,8 @@
a AUTHENTICATE XOAUTH2 a9sha9sfs[...]9dfja929dk==
"""
+from __future__ import absolute_import, print_function
+
import base64
import imaplib
import json
@@ -67,6 +69,8 @@
import smtplib
import sys
import urllib
+import six
+from six.moves import input
def SetupOptionParser():
@@ -156,7 +160,7 @@ def FormatUrlParams(params):
A URL query string version of the given parameters.
"""
param_fragments = []
- for param in sorted(params.iteritems(), key=lambda x: x[0]):
+ for param in sorted(six.iteritems(params), key=lambda x: x[0]):
param_fragments.append('%s=%s' % (param[0], UrlEscape(param[1])))
return '&'.join(param_fragments)
@@ -262,7 +266,7 @@ def TestImapAuthentication(user, auth_string):
auth_string: A valid OAuth2 string, as returned by GenerateOAuth2String.
Must not be base64-encoded, since imaplib does its own base64-encoding.
"""
- print
+ print()
imap_conn = imaplib.IMAP4_SSL('imap.gmail.com')
imap_conn.debug = 4
imap_conn.authenticate('XOAUTH2', lambda x: auth_string)
@@ -277,7 +281,7 @@ def TestSmtpAuthentication(user, auth_string):
auth_string: A valid OAuth2 string, not base64-encoded, as returned by
GenerateOAuth2String.
"""
- print
+ print()
smtp_conn = smtplib.SMTP('smtp.gmail.com', 587)
smtp_conn.set_debuglevel(True)
smtp_conn.ehlo('test')
@@ -288,7 +292,7 @@ def TestSmtpAuthentication(user, auth_string):
def RequireOptions(options, *args):
missing = [arg for arg in args if getattr(options, arg) is None]
if missing:
- print 'Missing options: %s' % ' '.join(missing)
+ print('Missing options: %s' % ' '.join(missing))
sys.exit(-1)
@@ -299,22 +303,22 @@ def main(argv):
RequireOptions(options, 'client_id', 'client_secret')
response = RefreshToken(options.client_id, options.client_secret,
options.refresh_token)
- print 'Access Token: %s' % response['access_token']
- print 'Access Token Expiration Seconds: %s' % response['expires_in']
+ print('Access Token: %s' % response['access_token'])
+ print('Access Token Expiration Seconds: %s' % response['expires_in'])
elif options.generate_oauth2_string:
RequireOptions(options, 'user', 'access_token')
- print ('OAuth2 argument:\n' +
- GenerateOAuth2String(options.user, options.access_token))
+ print(('OAuth2 argument:\n' +
+ GenerateOAuth2String(options.user, options.access_token)))
elif options.generate_oauth2_token:
RequireOptions(options, 'client_id', 'client_secret')
- print 'To authorize token, visit this url and follow the directions:'
- print ' %s' % GeneratePermissionUrl(options.client_id, options.scope)
- authorization_code = raw_input('Enter verification code: ')
+ print('To authorize token, visit this url and follow the directions:')
+ print(' %s' % GeneratePermissionUrl(options.client_id, options.scope))
+ authorization_code = input('Enter verification code: ')
response = AuthorizeTokens(options.client_id, options.client_secret,
authorization_code)
- print 'Refresh Token: %s' % response['refresh_token']
- print 'Access Token: %s' % response['access_token']
- print 'Access Token Expiration Seconds: %s' % response['expires_in']
+ print('Refresh Token: %s' % response['refresh_token'])
+ print('Access Token: %s' % response['access_token'])
+ print('Access Token Expiration Seconds: %s' % response['expires_in'])
elif options.test_imap_authentication:
RequireOptions(options, 'user', 'access_token')
TestImapAuthentication(options.user,
@@ -327,7 +331,7 @@ def main(argv):
base64_encode=False))
else:
options_parser.print_help()
- print 'Nothing to do, exiting.'
+ print('Nothing to do, exiting.')
return
diff --git a/src/sandbox/oauth2_runner.py b/src/sandbox/oauth2_runner.py
index b74effd7..3e401b3a 100755
--- a/src/sandbox/oauth2_runner.py
+++ b/src/sandbox/oauth2_runner.py
@@ -1,335 +1,339 @@
-#!/usr/bin/python
-#
-# Copyright 2012 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
- # http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Performs client tasks for testing IMAP OAuth2 authentication.
-
-To use this script, you'll need to have registered with Google as an OAuth
-application and obtained an OAuth client ID and client secret.
-See http://code.google.com/apis/accounts/docs/OAuth2.html for instructions on
-registering and for documentation of the APIs invoked by this code.
-
-This script has 3 modes of operation.
-
-1. The first mode is used to generate and authorize an OAuth2 token, the
-first step in logging in via OAuth2.
-
- oauth2 --user=xxx@gmail.com \
- --client_id=1038[...].apps.googleusercontent.com \
- --client_secret=VWFn8LIKAMC-MsjBMhJeOplZ \
- --generate_oauth2_token
-
-The script will converse with Google and generate an oauth request
-token, then present you with a URL you should visit in your browser to
-authorize the token. Once you get the verification code from the Google
-website, enter it into the script to get your OAuth access token. The output
-from this command will contain the access token, a refresh token, and some
-metadata about the tokens. The access token can be used until it expires, and
-the refresh token lasts indefinitely, so you should record these values for
-reuse.
-
-2. The script will generate new access tokens using a refresh token.
-
- oauth2 --user=xxx@gmail.com \
- --client_id=1038[...].apps.googleusercontent.com \
- --client_secret=VWFn8LIKAMC-MsjBMhJeOplZ \
- --refresh_token=1/Yzm6MRy4q1xi7Dx2DuWXNgT6s37OrP_DW_IoyTum4YA
-
-3. The script will generate an OAuth2 string that can be fed
-directly to IMAP or SMTP. This is triggered with the --generate_oauth2_string
-option.
-
- oauth2 --generate_oauth2_string --user=xxx@gmail.com \
- --access_token=ya29.AGy[...]ezLg
-
-The output of this mode will be a base64-encoded string. To use it, connect to a
-IMAPFE and pass it as the second argument to the AUTHENTICATE command.
-
- a AUTHENTICATE XOAUTH2 a9sha9sfs[...]9dfja929dk==
-"""
-
-import base64
-import imaplib
-import json
-from optparse import OptionParser
-import smtplib
-import sys
-import urllib
-
-
-def SetupOptionParser():
- # Usage message is the module's docstring.
- parser = OptionParser(usage=__doc__)
- parser.add_option('--generate_oauth2_token',
- action='store_true',
- dest='generate_oauth2_token',
- help='generates an OAuth2 token for testing')
- parser.add_option('--generate_oauth2_string',
- action='store_true',
- dest='generate_oauth2_string',
- help='generates an initial client response string for '
- 'OAuth2')
- parser.add_option('--client_id',
- default=None,
- help='Client ID of the application that is authenticating. '
- 'See OAuth2 documentation for details.')
- parser.add_option('--client_secret',
- default=None,
- help='Client secret of the application that is '
- 'authenticating. See OAuth2 documentation for '
- 'details.')
- parser.add_option('--access_token',
- default=None,
- help='OAuth2 access token')
- parser.add_option('--refresh_token',
- default=None,
- help='OAuth2 refresh token')
- parser.add_option('--scope',
- default='https://mail.google.com/',
- help='scope for the access token. Multiple scopes can be '
- 'listed separated by spaces with the whole argument '
- 'quoted.')
- parser.add_option('--test_imap_authentication',
- action='store_true',
- dest='test_imap_authentication',
- help='attempts to authenticate to IMAP')
- parser.add_option('--test_smtp_authentication',
- action='store_true',
- dest='test_smtp_authentication',
- help='attempts to authenticate to SMTP')
- parser.add_option('--user',
- default=None,
- help='email address of user whose account is being '
- 'accessed')
- return parser
-
-
-# The URL root for accessing Google Accounts.
-GOOGLE_ACCOUNTS_BASE_URL = 'https://accounts.google.com'
-
-
-# Hardcoded dummy redirect URI for non-web apps.
-REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
-
-
-def AccountsUrl(command):
- """Generates the Google Accounts URL.
-
- Args:
- command: The command to execute.
-
- Returns:
- A URL for the given command.
- """
- return '%s/%s' % (GOOGLE_ACCOUNTS_BASE_URL, command)
-
-
-def UrlEscape(text):
- # See OAUTH 5.1 for a definition of which characters need to be escaped.
- return urllib.quote(text, safe='~-._')
-
-
-def UrlUnescape(text):
- # See OAUTH 5.1 for a definition of which characters need to be escaped.
- return urllib.unquote(text)
-
-
-def FormatUrlParams(params):
- """Formats parameters into a URL query string.
-
- Args:
- params: A key-value map.
-
- Returns:
- A URL query string version of the given parameters.
- """
- param_fragments = []
- for param in sorted(params.iteritems(), key=lambda x: x[0]):
- param_fragments.append('%s=%s' % (param[0], UrlEscape(param[1])))
- return '&'.join(param_fragments)
-
-
-def GeneratePermissionUrl(client_id, scope='https://mail.google.com/'):
- """Generates the URL for authorizing access.
-
- This uses the "OAuth2 for Installed Applications" flow described at
- https://developers.google.com/accounts/docs/OAuth2InstalledApp
-
- Args:
- client_id: Client ID obtained by registering your app.
- scope: scope for access token, e.g. 'https://mail.google.com'
- Returns:
- A URL that the user should visit in their browser.
- """
- params = {}
- params['client_id'] = client_id
- params['redirect_uri'] = REDIRECT_URI
- params['scope'] = scope
- params['response_type'] = 'code'
- return '%s?%s' % (AccountsUrl('o/oauth2/auth'),
- FormatUrlParams(params))
-
-
-def AuthorizeTokens(client_id, client_secret, authorization_code):
- """Obtains OAuth access token and refresh token.
-
- This uses the application portion of the "OAuth2 for Installed Applications"
- flow at https://developers.google.com/accounts/docs/OAuth2InstalledApp#handlingtheresponse
-
- Args:
- client_id: Client ID obtained by registering your app.
- client_secret: Client secret obtained by registering your app.
- authorization_code: code generated by Google Accounts after user grants
- permission.
- Returns:
- The decoded response from the Google Accounts server, as a dict. Expected
- fields include 'access_token', 'expires_in', and 'refresh_token'.
- """
- params = {}
- params['client_id'] = client_id
- params['client_secret'] = client_secret
- params['code'] = authorization_code
- params['redirect_uri'] = REDIRECT_URI
- params['grant_type'] = 'authorization_code'
- request_url = AccountsUrl('o/oauth2/token')
-
- response = urllib.urlopen(request_url, urllib.urlencode(params)).read()
- return json.loads(response)
-
-
-def RefreshToken(client_id, client_secret, refresh_token):
- """Obtains a new token given a refresh token.
-
- See https://developers.google.com/accounts/docs/OAuth2InstalledApp#refresh
-
- Args:
- client_id: Client ID obtained by registering your app.
- client_secret: Client secret obtained by registering your app.
- refresh_token: A previously-obtained refresh token.
- Returns:
- The decoded response from the Google Accounts server, as a dict. Expected
- fields include 'access_token', 'expires_in', and 'refresh_token'.
- """
- params = {}
- params['client_id'] = client_id
- params['client_secret'] = client_secret
- params['refresh_token'] = refresh_token
- params['grant_type'] = 'refresh_token'
- request_url = AccountsUrl('o/oauth2/token')
-
- response = urllib.urlopen(request_url, urllib.urlencode(params)).read()
- return json.loads(response)
-
-
-def GenerateOAuth2String(username, access_token, base64_encode=True):
- """Generates an IMAP OAuth2 authentication string.
-
- See https://developers.google.com/google-apps/gmail/oauth2_overview
-
- Args:
- username: the username (email address) of the account to authenticate
- access_token: An OAuth2 access token.
- base64_encode: Whether to base64-encode the output.
-
- Returns:
- The SASL argument for the OAuth2 mechanism.
- """
- auth_string = 'user=%s\1auth=Bearer %s\1\1' % (username, access_token)
- if base64_encode:
- auth_string = base64.b64encode(auth_string)
- return auth_string
-
-
-def TestImapAuthentication(user, auth_string):
- """Authenticates to IMAP with the given auth_string.
-
- Prints a debug trace of the attempted IMAP connection.
-
- Args:
- user: The Gmail username (full email address)
- auth_string: A valid OAuth2 string, as returned by GenerateOAuth2String.
- Must not be base64-encoded, since imaplib does its own base64-encoding.
- """
- print
- imap_conn = imaplib.IMAP4_SSL('imap.gmail.com')
- imap_conn.debug = 4
- imap_conn.authenticate('XOAUTH2', lambda x: auth_string)
- imap_conn.select('INBOX')
-
-
-def TestSmtpAuthentication(user, auth_string):
- """Authenticates to SMTP with the given auth_string.
-
- Args:
- user: The Gmail username (full email address)
- auth_string: A valid OAuth2 string, not base64-encoded, as returned by
- GenerateOAuth2String.
- """
- print
- smtp_conn = smtplib.SMTP('smtp.gmail.com', 587)
- smtp_conn.set_debuglevel(True)
- smtp_conn.ehlo('test')
- smtp_conn.starttls()
- smtp_conn.docmd('AUTH', 'XOAUTH2 ' + base64.b64encode(auth_string))
-
-
-def RequireOptions(options, *args):
- missing = [arg for arg in args if getattr(options, arg) is None]
- if missing:
- print 'Missing options: %s' % ' '.join(missing)
- sys.exit(-1)
-
-
-def main(argv):
- options_parser = SetupOptionParser()
- (options, args) = options_parser.parse_args()
- if options.refresh_token:
- RequireOptions(options, 'client_id', 'client_secret')
- response = RefreshToken(options.client_id, options.client_secret,
- options.refresh_token)
- print 'Access Token: %s' % response['access_token']
- print 'Access Token Expiration Seconds: %s' % response['expires_in']
- elif options.generate_oauth2_string:
- RequireOptions(options, 'user', 'access_token')
- print ('OAuth2 argument:\n' +
- GenerateOAuth2String(options.user, options.access_token))
- elif options.generate_oauth2_token:
- RequireOptions(options, 'client_id', 'client_secret')
- print 'To authorize token, visit this url and follow the directions:'
- print ' %s' % GeneratePermissionUrl(options.client_id, options.scope)
- authorization_code = raw_input('Enter verification code: ')
- response = AuthorizeTokens(options.client_id, options.client_secret,
- authorization_code)
- print 'Refresh Token: %s' % response['refresh_token']
- print 'Access Token: %s' % response['access_token']
- print 'Access Token Expiration Seconds: %s' % response['expires_in']
- elif options.test_imap_authentication:
- RequireOptions(options, 'user', 'access_token')
- TestImapAuthentication(options.user,
- GenerateOAuth2String(options.user, options.access_token,
- base64_encode=False))
- elif options.test_smtp_authentication:
- RequireOptions(options, 'user', 'access_token')
- TestSmtpAuthentication(options.user,
- GenerateOAuth2String(options.user, options.access_token,
- base64_encode=False))
- else:
- options_parser.print_help()
- print 'Nothing to do, exiting.'
- return
-
-
-if __name__ == '__main__':
- main(sys.argv)
+#!/usr/bin/python
+#
+# Copyright 2012 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+ # http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Performs client tasks for testing IMAP OAuth2 authentication.
+
+To use this script, you'll need to have registered with Google as an OAuth
+application and obtained an OAuth client ID and client secret.
+See http://code.google.com/apis/accounts/docs/OAuth2.html for instructions on
+registering and for documentation of the APIs invoked by this code.
+
+This script has 3 modes of operation.
+
+1. The first mode is used to generate and authorize an OAuth2 token, the
+first step in logging in via OAuth2.
+
+ oauth2 --user=xxx@gmail.com \
+ --client_id=1038[...].apps.googleusercontent.com \
+ --client_secret=VWFn8LIKAMC-MsjBMhJeOplZ \
+ --generate_oauth2_token
+
+The script will converse with Google and generate an oauth request
+token, then present you with a URL you should visit in your browser to
+authorize the token. Once you get the verification code from the Google
+website, enter it into the script to get your OAuth access token. The output
+from this command will contain the access token, a refresh token, and some
+metadata about the tokens. The access token can be used until it expires, and
+the refresh token lasts indefinitely, so you should record these values for
+reuse.
+
+2. The script will generate new access tokens using a refresh token.
+
+ oauth2 --user=xxx@gmail.com \
+ --client_id=1038[...].apps.googleusercontent.com \
+ --client_secret=VWFn8LIKAMC-MsjBMhJeOplZ \
+ --refresh_token=1/Yzm6MRy4q1xi7Dx2DuWXNgT6s37OrP_DW_IoyTum4YA
+
+3. The script will generate an OAuth2 string that can be fed
+directly to IMAP or SMTP. This is triggered with the --generate_oauth2_string
+option.
+
+ oauth2 --generate_oauth2_string --user=xxx@gmail.com \
+ --access_token=ya29.AGy[...]ezLg
+
+The output of this mode will be a base64-encoded string. To use it, connect to an
+IMAPFE and pass it as the second argument to the AUTHENTICATE command.
+
+ a AUTHENTICATE XOAUTH2 a9sha9sfs[...]9dfja929dk==
+"""
+
+from __future__ import absolute_import, print_function
+
+import base64
+import imaplib
+import json
+from optparse import OptionParser
+import smtplib
+import sys
+import urllib
+import six
+from six.moves import input
+
+
+def SetupOptionParser():
+ # Usage message is the module's docstring.
+ parser = OptionParser(usage=__doc__)
+ parser.add_option('--generate_oauth2_token',
+ action='store_true',
+ dest='generate_oauth2_token',
+ help='generates an OAuth2 token for testing')
+ parser.add_option('--generate_oauth2_string',
+ action='store_true',
+ dest='generate_oauth2_string',
+ help='generates an initial client response string for '
+ 'OAuth2')
+ parser.add_option('--client_id',
+ default=None,
+ help='Client ID of the application that is authenticating. '
+ 'See OAuth2 documentation for details.')
+ parser.add_option('--client_secret',
+ default=None,
+ help='Client secret of the application that is '
+ 'authenticating. See OAuth2 documentation for '
+ 'details.')
+ parser.add_option('--access_token',
+ default=None,
+ help='OAuth2 access token')
+ parser.add_option('--refresh_token',
+ default=None,
+ help='OAuth2 refresh token')
+ parser.add_option('--scope',
+ default='https://mail.google.com/',
+ help='scope for the access token. Multiple scopes can be '
+ 'listed separated by spaces with the whole argument '
+ 'quoted.')
+ parser.add_option('--test_imap_authentication',
+ action='store_true',
+ dest='test_imap_authentication',
+ help='attempts to authenticate to IMAP')
+ parser.add_option('--test_smtp_authentication',
+ action='store_true',
+ dest='test_smtp_authentication',
+ help='attempts to authenticate to SMTP')
+ parser.add_option('--user',
+ default=None,
+ help='email address of user whose account is being '
+ 'accessed')
+ return parser
+
+
+# The URL root for accessing Google Accounts.
+GOOGLE_ACCOUNTS_BASE_URL = 'https://accounts.google.com'
+
+
+# Hardcoded dummy redirect URI for non-web apps.
+REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
+
+
+def AccountsUrl(command):
+ """Generates the Google Accounts URL.
+
+ Args:
+ command: The command to execute.
+
+ Returns:
+ A URL for the given command.
+ """
+ return '%s/%s' % (GOOGLE_ACCOUNTS_BASE_URL, command)
+
+
+def UrlEscape(text):
+ # See OAUTH 5.1 for a definition of which characters need to be escaped.
+ return urllib.quote(text, safe='~-._')
+
+
+def UrlUnescape(text):
+ # See OAUTH 5.1 for a definition of which characters need to be escaped.
+ return urllib.unquote(text)
+
+
+def FormatUrlParams(params):
+ """Formats parameters into a URL query string.
+
+ Args:
+ params: A key-value map.
+
+ Returns:
+ A URL query string version of the given parameters.
+ """
+ param_fragments = []
+ for param in sorted(six.iteritems(params), key=lambda x: x[0]):
+ param_fragments.append('%s=%s' % (param[0], UrlEscape(param[1])))
+ return '&'.join(param_fragments)
+
+
+def GeneratePermissionUrl(client_id, scope='https://mail.google.com/'):
+ """Generates the URL for authorizing access.
+
+ This uses the "OAuth2 for Installed Applications" flow described at
+ https://developers.google.com/accounts/docs/OAuth2InstalledApp
+
+ Args:
+ client_id: Client ID obtained by registering your app.
+ scope: scope for access token, e.g. 'https://mail.google.com'
+ Returns:
+ A URL that the user should visit in their browser.
+ """
+ params = {}
+ params['client_id'] = client_id
+ params['redirect_uri'] = REDIRECT_URI
+ params['scope'] = scope
+ params['response_type'] = 'code'
+ return '%s?%s' % (AccountsUrl('o/oauth2/auth'),
+ FormatUrlParams(params))
+
+
+def AuthorizeTokens(client_id, client_secret, authorization_code):
+ """Obtains OAuth access token and refresh token.
+
+ This uses the application portion of the "OAuth2 for Installed Applications"
+ flow at https://developers.google.com/accounts/docs/OAuth2InstalledApp#handlingtheresponse
+
+ Args:
+ client_id: Client ID obtained by registering your app.
+ client_secret: Client secret obtained by registering your app.
+ authorization_code: code generated by Google Accounts after user grants
+ permission.
+ Returns:
+ The decoded response from the Google Accounts server, as a dict. Expected
+ fields include 'access_token', 'expires_in', and 'refresh_token'.
+ """
+ params = {}
+ params['client_id'] = client_id
+ params['client_secret'] = client_secret
+ params['code'] = authorization_code
+ params['redirect_uri'] = REDIRECT_URI
+ params['grant_type'] = 'authorization_code'
+ request_url = AccountsUrl('o/oauth2/token')
+
+ response = urllib.urlopen(request_url, urllib.urlencode(params)).read()
+ return json.loads(response)
+
+
+def RefreshToken(client_id, client_secret, refresh_token):
+ """Obtains a new token given a refresh token.
+
+ See https://developers.google.com/accounts/docs/OAuth2InstalledApp#refresh
+
+ Args:
+ client_id: Client ID obtained by registering your app.
+ client_secret: Client secret obtained by registering your app.
+ refresh_token: A previously-obtained refresh token.
+ Returns:
+ The decoded response from the Google Accounts server, as a dict. Expected
+ fields include 'access_token', 'expires_in', and 'refresh_token'.
+ """
+ params = {}
+ params['client_id'] = client_id
+ params['client_secret'] = client_secret
+ params['refresh_token'] = refresh_token
+ params['grant_type'] = 'refresh_token'
+ request_url = AccountsUrl('o/oauth2/token')
+
+ response = urllib.urlopen(request_url, urllib.urlencode(params)).read()
+ return json.loads(response)
+
+
+def GenerateOAuth2String(username, access_token, base64_encode=True):
+ """Generates an IMAP OAuth2 authentication string.
+
+ See https://developers.google.com/google-apps/gmail/oauth2_overview
+
+ Args:
+ username: the username (email address) of the account to authenticate
+ access_token: An OAuth2 access token.
+ base64_encode: Whether to base64-encode the output.
+
+ Returns:
+ The SASL argument for the OAuth2 mechanism.
+ """
+ auth_string = 'user=%s\1auth=Bearer %s\1\1' % (username, access_token)
+ if base64_encode:
+ auth_string = base64.b64encode(auth_string)
+ return auth_string
+
+
+def TestImapAuthentication(user, auth_string):
+ """Authenticates to IMAP with the given auth_string.
+
+ Prints a debug trace of the attempted IMAP connection.
+
+ Args:
+ user: The Gmail username (full email address)
+ auth_string: A valid OAuth2 string, as returned by GenerateOAuth2String.
+ Must not be base64-encoded, since imaplib does its own base64-encoding.
+ """
+ print()
+ imap_conn = imaplib.IMAP4_SSL('imap.gmail.com')
+ imap_conn.debug = 4
+ imap_conn.authenticate('XOAUTH2', lambda x: auth_string)
+ imap_conn.select('INBOX')
+
+
+def TestSmtpAuthentication(user, auth_string):
+ """Authenticates to SMTP with the given auth_string.
+
+ Args:
+ user: The Gmail username (full email address)
+ auth_string: A valid OAuth2 string, not base64-encoded, as returned by
+ GenerateOAuth2String.
+ """
+ print()
+ smtp_conn = smtplib.SMTP('smtp.gmail.com', 587)
+ smtp_conn.set_debuglevel(True)
+ smtp_conn.ehlo('test')
+ smtp_conn.starttls()
+ smtp_conn.docmd('AUTH', 'XOAUTH2 ' + base64.b64encode(auth_string))
+
+
+def RequireOptions(options, *args):
+ missing = [arg for arg in args if getattr(options, arg) is None]
+ if missing:
+ print('Missing options: %s' % ' '.join(missing))
+ sys.exit(-1)
+
+
+def main(argv):
+ options_parser = SetupOptionParser()
+ (options, args) = options_parser.parse_args()
+ if options.refresh_token:
+ RequireOptions(options, 'client_id', 'client_secret')
+ response = RefreshToken(options.client_id, options.client_secret,
+ options.refresh_token)
+ print('Access Token: %s' % response['access_token'])
+ print('Access Token Expiration Seconds: %s' % response['expires_in'])
+ elif options.generate_oauth2_string:
+ RequireOptions(options, 'user', 'access_token')
+ print(('OAuth2 argument:\n' +
+ GenerateOAuth2String(options.user, options.access_token)))
+ elif options.generate_oauth2_token:
+ RequireOptions(options, 'client_id', 'client_secret')
+ print('To authorize token, visit this url and follow the directions:')
+ print(' %s' % GeneratePermissionUrl(options.client_id, options.scope))
+ authorization_code = input('Enter verification code: ')
+ response = AuthorizeTokens(options.client_id, options.client_secret,
+ authorization_code)
+ print('Refresh Token: %s' % response['refresh_token'])
+ print('Access Token: %s' % response['access_token'])
+ print('Access Token Expiration Seconds: %s' % response['expires_in'])
+ elif options.test_imap_authentication:
+ RequireOptions(options, 'user', 'access_token')
+ TestImapAuthentication(options.user,
+ GenerateOAuth2String(options.user, options.access_token,
+ base64_encode=False))
+ elif options.test_smtp_authentication:
+ RequireOptions(options, 'user', 'access_token')
+ TestSmtpAuthentication(options.user,
+ GenerateOAuth2String(options.user, options.access_token,
+ base64_encode=False))
+ else:
+ options_parser.print_help()
+ print('Nothing to do, exiting.')
+ return
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/src/sandbox/oauth2_tests.py b/src/sandbox/oauth2_tests.py
index fdf52331..67c2c5d9 100755
--- a/src/sandbox/oauth2_tests.py
+++ b/src/sandbox/oauth2_tests.py
@@ -1,37 +1,39 @@
-__author__ = 'Aubert'
-
-import httplib2
-from six.moves import input
-from oauth2client.client import OAuth2WebServerFlow
-
-CLIENT_ID = "some-ids"
-CLIENT_SECRET = "secret"
-SCOPES = ['https://mail.google.com/', # IMAP/SMTP client access
- 'https://www.googleapis.com/auth/email'] # Email address access (verify token authorized by correct account
-
-def test_oauth2_with_google():
- """
- Do something
- :return:
- """
-
- flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES))
-
- # Step 1: get user code and verification URL
- # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode
- flow_info = flow.step1_get_device_and_user_codes()
- print "Enter the following code at %s: %s" % (flow_info.verification_url,
- flow_info.user_code)
- print "Then press Enter."
- input()
-
- # Step 2: get credentials
- # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken
- credentials = flow.step2_exchange(device_flow_info=flow_info)
- print "Access token:", credentials.access_token
- print "Refresh token:", credentials.refresh_token
-
-#Get IMAP Service
-
-if __name__ == '__main__':
+from __future__ import absolute_import, print_function
+
+__author__ = 'Aubert'
+
+import httplib2
+from six.moves import input
+from oauth2client.client import OAuth2WebServerFlow
+
+CLIENT_ID = "some-ids"
+CLIENT_SECRET = "secret"
+SCOPES = ['https://mail.google.com/', # IMAP/SMTP client access
+          'https://www.googleapis.com/auth/email'] # Email address access (verify token authorized by correct account)
+
+def test_oauth2_with_google():
+ """
+    Run Google's OAuth2 device flow: print the user code and verification
+    URL, wait for the user to authorize, then print the resulting tokens.
+ """
+
+ flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, " ".join(SCOPES))
+
+ # Step 1: get user code and verification URL
+ # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingacode
+ flow_info = flow.step1_get_device_and_user_codes()
+ print("Enter the following code at %s: %s" % (flow_info.verification_url,
+ flow_info.user_code))
+ print("Then press Enter.")
+ input()
+
+ # Step 2: get credentials
+ # https://developers.google.com/accounts/docs/OAuth2ForDevices#obtainingatoken
+ credentials = flow.step2_exchange(device_flow_info=flow_info)
+ print("Access token:", credentials.access_token)
+ print("Refresh token:", credentials.refresh_token)
+
+#Get IMAP Service
+
+if __name__ == '__main__':
test_oauth2_with_google()
\ No newline at end of file
diff --git a/src/sandbox/pycrypto_test.py b/src/sandbox/pycrypto_test.py
index bbf04eaf..c21208b4 100644
--- a/src/sandbox/pycrypto_test.py
+++ b/src/sandbox/pycrypto_test.py
@@ -1,3 +1,5 @@
+from __future__ import absolute_import, print_function
+
import os, base64
from Crypto.Cipher import AES
import hashlib
@@ -10,7 +12,7 @@
class AESEncryptor(object):
- def __init__(self, key):
+ def __init__(self, key):
self.bs = 32
self.key = hashlib.sha256(key.encode()).digest()
@@ -38,8 +40,8 @@ def _unpad(s):
key="This is my key"
enc = AESEncryptor(key)
for secret in secrets:
- print "Secret:", secret
- encrypted = enc.encrypt(secret)
- print "Encrypted secret:", encrypted
- print "Clear Secret:" , enc.decrypt(encrypted)
- print '-' *50
+ print("Secret:", secret)
+ encrypted = enc.encrypt(secret)
+ print("Encrypted secret:", encrypted)
+ print("Clear Secret:" , enc.decrypt(encrypted))
+ print('-' *50)
diff --git a/src/sandbox/python_api_tests.py b/src/sandbox/python_api_tests.py
index 1dc57441..244dd421 100644
--- a/src/sandbox/python_api_tests.py
+++ b/src/sandbox/python_api_tests.py
@@ -1,5 +1,7 @@
#!/usr/bin/python
+from __future__ import absolute_import, print_function
+
import argparse
import httplib2
@@ -44,4 +46,4 @@
# Print ID for each thread
if threads['threads']:
for thread in threads['threads']:
- print 'Thread ID: %s' % (thread['id'])
+ print('Thread ID: %s' % (thread['id']))
diff --git a/src/sandbox/test_wx.py b/src/sandbox/test_wx.py
index cb263b14..74faa03b 100644
--- a/src/sandbox/test_wx.py
+++ b/src/sandbox/test_wx.py
@@ -1,5 +1,7 @@
# border.py
+from __future__ import absolute_import
+
import wx
ID_NEW = 1
@@ -8,17 +10,17 @@
ID_DELETE = 4
class Example(wx.Frame):
-
+
def __init__(self, parent, title):
- super(Example, self).__init__(parent, title=title,
+ super(Example, self).__init__(parent, title=title,
size=(260, 180))
-
+
self.InitUI()
self.Centre()
- self.Show()
-
+ self.Show()
+
def InitUI(self):
-
+
panel = wx.Panel(self)
panel.SetBackgroundColour('#4f5049')
@@ -79,7 +81,7 @@ def InitUI(self):
if __name__ == '__main__':
-
+
app = wx.App()
Example(None, title='Gmvault-test')
app.MainLoop()
diff --git a/src/sandbox/unicode_test.py b/src/sandbox/unicode_test.py
index 22e64bc4..0dbc3559 100644
--- a/src/sandbox/unicode_test.py
+++ b/src/sandbox/unicode_test.py
@@ -1,40 +1,43 @@
# -*- coding: utf-8 -*-
+from __future__ import absolute_import, print_function
+
import sys
import unicodedata
+import six
def ascii_hex(str):
new_str = ""
for c in str:
new_str += "%s=hex[%s]," % (c,hex(ord(c)))
return new_str
-
+
def convert_to_utf8(a_str):
"""
"""
if type(a_str) != type(u'a'):
- #import chardet
- #char_enc = chardet.detect(a_str)
- #print("detected encoding = %s" % (char_enc))
- #print("system machine encoding = %s" % (sys.getdefaultencoding()))
- #u_str = unicode(a_str, char_enc['encoding'], errors='ignore')
- u_str = unicode(a_str, 'cp437', errors='ignore')
+ #import chardet
+ #char_enc = chardet.detect(a_str)
+ #print("detected encoding = %s" % (char_enc))
+ #print("system machine encoding = %s" % (sys.getdefaultencoding()))
+ #u_str = unicode(a_str, char_enc['encoding'], errors='ignore')
+ u_str = six.text_type(a_str, 'cp437', errors='ignore')
else:
print("Already unicode do not convert")
u_str = a_str
- print("raw unicode = %s" % (u_str))
+ print(("raw unicode = %s" % (u_str)))
#u_str = unicodedata.normalize('NFKC',u_str)
u_str = u_str.encode('unicode_escape').decode('unicode_escape')
- print("unicode escape = %s" % (u_str))
- print("normalized unicode(NFKD) = %s" % (repr(unicodedata.normalize('NFKD',u_str))))
- print("normalized unicode(NFKC) = %s" % (repr(unicodedata.normalize('NFKC',u_str))))
- print("normalized unicode(NFC) = %s" % (repr(unicodedata.normalize('NFC',u_str))))
- print("normalized unicode(NFD) = %s" % (repr(unicodedata.normalize('NFD',u_str))))
+ print(("unicode escape = %s" % (u_str)))
+ print(("normalized unicode(NFKD) = %s" % (repr(unicodedata.normalize('NFKD',u_str)))))
+ print(("normalized unicode(NFKC) = %s" % (repr(unicodedata.normalize('NFKC',u_str)))))
+ print(("normalized unicode(NFC) = %s" % (repr(unicodedata.normalize('NFC',u_str)))))
+ print(("normalized unicode(NFD) = %s" % (repr(unicodedata.normalize('NFD',u_str)))))
hex_s = ascii_hex(u_str)
- print("Hex ascii %s" % (hex_s))
+ print(("Hex ascii %s" % (hex_s)))
utf8_arg = u_str
#utf8_arg = u_str.encode("utf-8")
-
+
return utf8_arg
if __name__ == '__main__':
diff --git a/src/sandbox_tests.py b/src/sandbox_tests.py
index f959205a..d5ba3fc3 100755
--- a/src/sandbox_tests.py
+++ b/src/sandbox_tests.py
@@ -24,6 +24,8 @@
Experimentation and validation of internal mechanisms
'''
+from __future__ import absolute_import, print_function
+
import unittest
import base64
import socket
@@ -67,83 +69,83 @@ class TestSandbox(unittest.TestCase): #pylint:disable-msg=R0904
def __init__(self, stuff):
""" constructor """
super(TestSandbox, self).__init__(stuff)
-
+
self.login = None
self.passwd = None
-
+
self.gmvault_login = None
- self.gmvault_passwd = None
-
+ self.gmvault_passwd = None
+
def setUp(self): #pylint:disable-msg=C0103
self.login, self.passwd = read_password_file('/homespace/gaubert/.ssh/passwd')
-
+
self.gmvault_login, self.gmvault_passwd = read_password_file('/homespace/gaubert/.ssh/gsync_passwd')
-
-
+
+
def ztest_logger(self):
"""
Test the logging mechanism
"""
-
+
import gmv.log_utils as log_utils
- log_utils.LoggerFactory.setup_cli_app_handler('./gmv.log')
-
+ log_utils.LoggerFactory.setup_cli_app_handler('./gmv.log')
+
LOG = log_utils.LoggerFactory.get_logger('gmv') #pylint:disable-msg=C0103
-
+
LOG.info("On Info")
-
+
LOG.warning("On Warning")
-
+
LOG.error("On Error")
-
+
LOG.notice("On Notice")
-
+
try:
raise Exception("Exception. This is my exception")
self.fail("Should never arrive here") #pylint:disable-msg=W0101
- except Exception, err: #pylint:disable-msg=W0101, W0703
+ except Exception as err: #pylint:disable-msg=W0101, W0703
LOG.exception("error,", err)
-
+
LOG.critical("On Critical")
-
+
def ztest_encrypt_blowfish(self):
"""
Test encryption with blowfish
"""
file_path = '../etc/tests/test_few_days_syncer/2384403887202624608.eml.gz'
-
+
import gzip
import gmv.blowfish
-
+
#create blowfish cipher
cipher = gmv.blowfish.Blowfish('VerySeCretKey')
-
+
gz_fd = gzip.open(file_path)
-
+
content = gz_fd.read()
-
+
cipher.initCTR()
crypted = cipher.encryptCTR(content)
-
+
cipher.initCTR()
decrypted = cipher.decryptCTR(crypted)
-
+
self.assertEquals(decrypted, content)
-
+
def ztest_regexpr(self):
"""
- regexpr for
+ regexpr for
"""
import re
the_str = "Subject: Marta Gutierrez commented on her Wall post.\nMessage-ID: "
regexpr = "Subject:\s+(?P.*)\s+Message-ID:\s+<(?P.*)>"
reg = re.compile(regexpr)
-
+
matched = reg.match(the_str)
if matched:
print("Matched")
- print("subject=[%s],messageid=[%s]" % (matched.group('subject'), matched.group('msgid')))
-
+ print(("subject=[%s],messageid=[%s]" % (matched.group('subject'), matched.group('msgid'))))
+
def ztest_is_encrypted_regexpr(self):
"""
Encrypted re
@@ -151,15 +153,15 @@ def ztest_is_encrypted_regexpr(self):
import re
the_str ="1384313269332005293.eml.crypt.gz"
regexpr ="[\w+,\.]+crypt[\w,\.]*"
-
+
reg= re.compile(regexpr)
matched = reg.match(the_str)
if matched:
print("\nMatched")
else:
print("\nUnmatched")
-
-
+
+
def ztest_memory_error_bug(self):
"""
Try to push the memory error
@@ -173,7 +175,7 @@ def ztest_memory_error_bug(self):
email_body = f.read()
mail = email.message_from_string(email_body)
- print mail
+ print(mail)
sys.argv = ['gmvault.py', 'restore', '--db-dir',
'/Users/gaubert/gmvault-data/gmvault-db-bug',
@@ -186,21 +188,21 @@ def ztest_retry_mode(self):
Test that the decorators are functionning properly
"""
class MonkeyIMAPFetcher(imap_utils.GIMAPFetcher):
-
+
def __init__(self, host, port, login, credential, readonly_folder = True):
"""
Constructor
"""
super(MonkeyIMAPFetcher, self).__init__( host, port, login, credential, readonly_folder)
self.connect_nb = 0
-
+
def connect(self):
"""
connect
"""
self.connect_nb += 1
-
- @imap_utils.retry(3,1,2)
+
+ @imap_utils.retry(3,1,2)
def push_email(self, a_body, a_flags, a_internal_time, a_labels):
"""
Throw exceptions
@@ -209,69 +211,69 @@ def push_email(self, a_body, a_flags, a_internal_time, a_labels):
#raise imaplib.IMAP4.abort("GIMAPFetcher cannot restore email in %s account." %("myaccount@gmail.com"))
raise socket.error("Error")
#raise imap_utils.PushEmailError("GIMAPFetcher cannot restore email in %s account." %("myaccount@gmail.com"))
-
-
+
+
imap_fetch = MonkeyIMAPFetcher(host = None, port = None, login = None, credential = None)
try:
imap_fetch.push_email(None, None, None, None)
#except Exception, err:
- except imaplib.IMAP4.error, err:
+ except imaplib.IMAP4.error as err:
self.assertEquals('GIMAPFetcher cannot restore email in myaccount@gmail.com account.', str(err))
-
+
self.assertEquals(imap_fetch.connect_nb, 3)
-
+
def ztest_os_walk(self):
"""
test os walk
"""
import os
for root, dirs, files in os.walk('/Users/gaubert/Dev/projects/gmvault/src/gmv/gmvault-db/db'):
- print("root: %s, sub-dirs : %s, files = %s" % (root, dirs, files))
-
+ print(("root: %s, sub-dirs : %s, files = %s" % (root, dirs, files)))
+
def ztest_get_subdir_info(self):
"""
test get subdir info
"""
import gmv.gmvault as gmv
-
+
storer = gmv.GmailStorer("/Users/gaubert/gmvault-db")
-
+
storer.init_sub_chats_dir()
-
-
-
+
+
+
def ztest_ordered_os_walk(self):
"""
test ordered os walk
"""
import gmv.gmvault_utils as gmvu
-
+
for vals in gmvu.ordered_dirwalk('/home/aubert/gmvault-db.old/db', a_wildcards="*.meta"):
- print("vals = %s\n" % (vals))
+ print(("vals = %s\n" % (vals)))
pass
-
+
import os
for root, dirs, files in os.walk('/Users/gaubert/Dev/projects/gmvault/src/gmv/gmvault-db/db'):
- print("root: %s, sub-dirs : %s, files = %s" % (root, dirs, files))
-
-
-
-
+ print(("root: %s, sub-dirs : %s, files = %s" % (root, dirs, files)))
+
+
+
+
def ztest_logging(self):
"""
Test logging
"""
#gmv_cmd.init_logging()
import gmv.log_utils as log_utils
- log_utils.LoggerFactory.setup_cli_app_handler(activate_log_file=True, file_path="/tmp/gmvault.log")
+ log_utils.LoggerFactory.setup_cli_app_handler(activate_log_file=True, file_path="/tmp/gmvault.log")
LOG = log_utils.LoggerFactory.get_logger('gmv')
LOG.critical("This is critical")
LOG.info("This is info")
LOG.error("This is error")
LOG.debug("This is debug")
-
-
+
+
def tests():
"""
@@ -279,7 +281,7 @@ def tests():
"""
suite = unittest.TestLoader().loadTestsFromTestCase(TestSandbox)
unittest.TextTestRunner(verbosity=2).run(suite)
-
+
if __name__ == '__main__':
-
+
tests()
diff --git a/src/setup_mac.py b/src/setup_mac.py
index ab2f43f3..ec12d8a6 100644
--- a/src/setup_mac.py
+++ b/src/setup_mac.py
@@ -5,12 +5,13 @@
python setup.py py2app
"""
+from __future__ import absolute_import
from setuptools import setup
#APP = ['./gmv/gmv_cmd.py']
APP = ['./gmv_runner.py']
DATA_FILES = []
-OPTIONS = {'argv_emulation': True, 'includes':['argparse', 'logbook','imapclient','chardet'],}
+OPTIONS = {'argv_emulation': True, 'includes':['argparse', 'logbook','imapclient','chardet','six'],}
setup(
app=APP,
diff --git a/src/setup_win.py b/src/setup_win.py
index f70ed910..31beeb09 100755
--- a/src/setup_win.py
+++ b/src/setup_win.py
@@ -16,6 +16,7 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import
from distutils.core import setup
import py2exe
diff --git a/src/validation_tests.py b/src/validation_tests.py
index 080f49ed..2360473b 100755
--- a/src/validation_tests.py
+++ b/src/validation_tests.py
@@ -16,6 +16,8 @@
along with this program. If not, see .
'''
+from __future__ import absolute_import, print_function
+
import sys
import unittest
import base64
@@ -41,7 +43,7 @@ def read_password_file(a_path):
with open(a_path) as f:
line = f.readline()
login, passwd = line.split(":")
-
+
return deobfuscate_string(login.strip()), deobfuscate_string(passwd.strip())
def delete_db_dir(a_db_dir):
@@ -59,27 +61,27 @@ class TestGMVaultValidation(unittest.TestCase): #pylint:disable-msg=R0904
def __init__(self, stuff):
""" constructor """
super(TestGMVaultValidation, self).__init__(stuff)
-
+
self.login = None
self.passwd = None
-
+
self.gmvault_login = None
- self.gmvault_passwd = None
-
+ self.gmvault_passwd = None
+
self.default_dir = "/tmp/gmvault-tests"
-
+
def setUp(self): #pylint:disable-msg=C0103
self.login, self.passwd = read_password_file('/homespace/gaubert/.ssh/passwd')
-
+
self.gmvault_test_login, self.gmvault_test_passwd = read_password_file('/homespace/gaubert/.ssh/gsync_passwd')
-
+
def test_help_msg_spawned_by_def(self):
"""
spawn python gmv_runner account > help_msg_spawned.txt
check that res is 0 or 1
"""
pass
-
+
def test_backup_10_emails(self):
"""
backup 10 emails and check that they are backed
@@ -87,42 +89,42 @@ def test_backup_10_emails(self):
=> python gmv_runner.py sync account > checkfile
"""
pass
-
+
def test_restore_and_check(self):
"""
Restore emails, retrieve them and compare with originals
"""
db_dir = "/tmp/the_dir"
-
-
+
+
def ztest_restore_on_gmail(self):
"""
clean db disk
sync with gmail for few emails
restore them on gmail test
"""
-
+
db_dir = '/tmp/gmail_bk'
-
+
#clean db dir
delete_db_dir(db_dir)
credential = { 'type' : 'passwd', 'value': self.passwd}
gs_credential = { 'type' : 'passwd', 'value': self.gmvault_passwd}
search_req = { 'type' : 'imap', 'req': "Since 1-Nov-2011 Before 3-Nov-2011"}
-
+
syncer = gmvault.GMVaulter(db_dir, 'imap.gmail.com', 993, self.login, credential, read_only_access = False, use_encryption = True)
-
+
#syncer.sync(imap_req = "Since 1-Nov-2011 Before 4-Nov-2011")
# Nov-2007 BigDataset
syncer.sync(imap_req = search_req)
-
+
restorer = gmvault.GMVaulter(db_dir, 'imap.gmail.com', 993, self.gmvault_login, gs_credential, read_only_access = False)
restorer.restore()
-
- print("Done \n")
-
-
-
+
+ print("Done \n")
+
+
+
def tests():
"""
@@ -130,7 +132,7 @@ def tests():
"""
suite = unittest.TestLoader().loadTestsFromTestCase(TestGMVaultValidation)
unittest.TextTestRunner(verbosity=2).run(suite)
-
+
if __name__ == '__main__':
-
+
tests()