author     Rafael G. Martins <rafael@rafaelmartins.eng.br>  2009-12-25 23:44:32 -0200
committer  Rafael G. Martins <rafael@rafaelmartins.eng.br>  2009-12-25 23:44:32 -0200
commit     8211a9256d3296b0e128990bfdfec4d792f5145e (patch)
tree       1e617577b8c630b8bb18e34b58cb9e57c9001213 /g_octave/fetch.py
parent     small fix on g_octave/config.py; added "Download Page" section to (diff)
download   g-octave-8211a9256d3296b0e128990bfdfec4d792f5145e.tar.gz
           g-octave-8211a9256d3296b0e128990bfdfec4d792f5145e.tar.bz2
           g-octave-8211a9256d3296b0e128990bfdfec4d792f5145e.zip
changes to g_octave/fetch.py to remove the undeclared dependency on wget
Diffstat (limited to 'g_octave/fetch.py')
-rw-r--r--  g_octave/fetch.py  131
1 file changed, 79 insertions, 52 deletions
diff --git a/g_octave/fetch.py b/g_octave/fetch.py
index 9093996..7d11514 100644
--- a/g_octave/fetch.py
+++ b/g_octave/fetch.py
@@ -15,10 +15,11 @@ from exception import FetchException
import urllib2
import os
import json
-import subprocess
import re
-import shutil
import tarfile
+import portage.output
+
+out = portage.output.EOutput()
re_files = {
'info.json': re.compile(r'info-([0-9]{8})-([0-9]+)\.json'),
@@ -30,57 +31,74 @@ re_files = {
def check_updates():
try:
- # we'll use urlopen to do all silensiously and save a subprocess
- fp = urllib2.urlopen(conf.db_mirror+'/update.json')
- my_json = fp.read()
- fp.close()
-
- except:
+ update = download_with_urllib2(conf.db_mirror + '/update.json', display_info=False)
+ except Exception, error:
# if we already have a file, that's ok
- if not os.path.exists(conf.db+'/update.json'):
- raise FetchException('Unable to get file list from the mirror: %s' % conf.db_mirror)
-
+ my_file = os.path.join(conf.db, 'update.json')
+ if not os.path.exists(my_file):
+ raise FetchException(error)
+ fp = open(my_file)
+ update = fp.read()
+ fp.close()
else:
- fp = open(conf.db+'/update.json', 'w', 0664)
- fp.write(my_json)
+ fp = open(os.path.join(conf.db, 'update.json'), 'w', 0644)
+ fp.write(update)
fp.close()
-
-
-def download_files():
- fp = open(conf.db+'/update.json')
- files = json.load(fp)
- fp.close()
+ updated_files = json.loads(update)
- for _file in files['files']:
- if not os.path.exists(conf.db+'/'+_file):
- download_with_wget(conf.db_mirror+'/'+_file, conf.db+'/'+_file)
- add_file_to_db_cache(_file)
+ old_files = []
+
+ for _file in updated_files['files']:
+ if not os.path.exists(os.path.join(conf.db, _file)):
+ old_files.append(_file)
+
+ return old_files
-def download_with_wget(url, dest):
-
- # TODO: let the user chooses how to fetch the files
+def download_files(files):
- ret = subprocess.call([
- '/usr/bin/wget',
- '-t', '5',
- '-T', '60',
- '--passive-ftp',
- '-O', dest+'.part',
- url,
- ])
+ for _file in files:
+ download_with_urllib2(conf.db_mirror + '/' + _file, conf.db)
+ add_file_to_db_cache(_file)
+ extract(_file)
- if ret != 0:
- raise FetchException('Failed to fetch the file: %s' % url)
- shutil.move(dest+'.part', dest)
+def download_with_urllib2(url, dest=None, display_info=True):
+
+ my_file = os.path.basename(url)
+
+ if display_info:
+ out.ebegin('Downloading: %s' % my_file)
+ try:
+ fp = urllib2.urlopen(url)
+ file_content = fp.read()
+ fp.close()
+ if dest != None:
+ if not os.path.exists(dest):
+ os.makedirs(dest, 0755)
+ fp = open(os.path.join(dest, my_file), 'w', 0644)
+ fp.write(file_content)
+ fp.close()
+ else:
+ if display_info:
+ out.eend(0)
+ return file_content
+ except Exception, error:
+ if display_info:
+ out.eend(1)
+ raise Exception('Failed to fetch the file (%s): %s' % (my_file, error))
+ else:
+ if display_info:
+ out.eend(0)
def add_file_to_db_cache(_file):
+ my_file = os.path.join(conf.db, 'cache.json')
+
try:
- fp = open(conf.db+'/cache.json')
+ fp = open(my_file)
files = json.load(fp)
fp.close()
except:
@@ -90,7 +108,7 @@ def add_file_to_db_cache(_file):
if re_files[f].match(_file) != None:
files['files'][f] = _file
- fp = open(conf.db+'/cache.json', 'w', 0644)
+ fp = open(my_file, 'w', 0644)
json.dump(files, fp)
fp.close()
@@ -98,34 +116,43 @@ def add_file_to_db_cache(_file):
def check_db_cache():
try:
- fp = open(conf.db+'/cache.json')
+ fp = open(os.path.join(conf.db, 'cache.json'))
cache = json.load(fp)
fp.close()
except:
cache = {'files': []}
- fp = open(conf.db+'/update.json')
+ fp = open(os.path.join(conf.db, 'update.json'))
update = json.load(fp)
fp.close()
for _file in update['files']:
- if _file not in cache['files']:
- if not os.path.exists(conf.db+'/'+_file):
- download_with_wget(conf.db_mirror+'/'+_file, conf.db+'/'+_file)
+ if _file not in cache['files'].values():
+ my_file = os.path.join(conf.db, _file)
+ if not os.path.exists(my_file):
+ download_with_urllib2(conf.db_mirror + '/' + _file, conf.db)
add_file_to_db_cache(_file)
extract(_file)
-def extract(_file):
+def extract(gz_file, display_info=True):
- my_file = conf.db+'/'+_file
+ my_file = os.path.join(conf.db, gz_file)
if tarfile.is_tarfile(my_file):
- fp = tarfile.open(my_file, 'r:gz')
- fp.extractall(conf.db)
-
+ if display_info:
+ out.ebegin('Extracting: %s' % os.path.basename(gz_file))
+ try:
+ fp = tarfile.open(my_file, 'r:gz')
+ fp.extractall(conf.db)
+ except Exception, error:
+ if display_info:
+ out.eend(1)
+ raise Exception('Failed to extract the file (%s): %s' % (my_file, error))
+ else:
+ if display_info:
+ out.eend(0)
+
if __name__ == '__main__':
- check_updates()
- download_files()
- check_db_cache()
+ download_files(check_updates())