Mercurial > p > roundup > code
comparison scripts/import_sf.py @ 5402:88dbacd11cd1
Python 3 preparation: update urllib / urllib2 / urlparse imports.
The existing roundup/anypy/urllib_.py is extended to cover more
imports and used in more places. Manual patch.
| author | Joseph Myers <jsm@polyomino.org.uk> |
|---|---|
| date | Tue, 24 Jul 2018 23:48:30 +0000 |
| parents | 0942fe89e82e |
| children | f004824563a3 |
comparison legend: equal | deleted | inserted | replaced
| 5401:4cf48ff01e04 | 5402:88dbacd11cd1 |
|---|---|
| 19 roundup-admin -i <tracker home> import /tmp/imported | 19 roundup-admin -i <tracker home> import /tmp/imported |
| 20 | 20 |
| 21 And you're done! | 21 And you're done! |
| 22 """ | 22 """ |
| 23 | 23 |
| 24 import sys, os, csv, time, urllib2, httplib, mimetypes, urlparse | 24 import sys, os, csv, time, httplib, mimetypes |
| 25 | 25 |
| 26 try: | 26 try: |
| 27 import cElementTree as ElementTree | 27 import cElementTree as ElementTree |
| 28 except ImportError: | 28 except ImportError: |
| 29 from elementtree import ElementTree | 29 from elementtree import ElementTree |
| 30 | 30 |
| 31 from roundup import instance, hyperdb, date, support, password | 31 from roundup import instance, hyperdb, date, support, password |
| 32 from roundup.anypy import urllib_ | |
| 32 | 33 |
| 33 today = date.Date('.') | 34 today = date.Date('.') |
| 34 | 35 |
| 35 DL_URL = 'http://sourceforge.net/tracker/download.php?group_id=%(group_id)s&atid=%(atid)s&aid=%(aid)s' | 36 DL_URL = 'http://sourceforge.net/tracker/download.php?group_id=%(group_id)s&atid=%(atid)s&aid=%(aid)s' |
| 36 | 37 |
| 44 response = conn.getresponse() | 45 response = conn.getresponse() |
| 45 # which should respond with a redirect to the correct url which has the | 46 # which should respond with a redirect to the correct url which has the |
| 46 # magic "group_id" and "atid" values in it that we need | 47 # magic "group_id" and "atid" values in it that we need |
| 47 assert response.status == 302, 'response code was %s'%response.status | 48 assert response.status == 302, 'response code was %s'%response.status |
| 48 location = response.getheader('location') | 49 location = response.getheader('location') |
| 49 query = urlparse.urlparse(response.getheader('location'))[-2] | 50 query = urllib_.urlparse(response.getheader('location'))[-2] |
| 50 info = dict([param.split('=') for param in query.split('&')]) | 51 info = dict([param.split('=') for param in query.split('&')]) |
| 51 return DL_URL%info | 52 return DL_URL%info |
| 52 | 53 |
| 53 def fetch_files(xml_file, file_dir): | 54 def fetch_files(xml_file, file_dir): |
| 54 """ Fetch files referenced in the xml_file into the dir file_dir. """ | 55 """ Fetch files referenced in the xml_file into the dir file_dir. """ |
| 89 urls[aid] = get_url(aid) | 90 urls[aid] = get_url(aid) |
| 90 f = open(os.path.join(file_dir, 'urls.txt'), 'a') | 91 f = open(os.path.join(file_dir, 'urls.txt'), 'a') |
| 91 f.write('%s %s\n'%(aid, urls[aid])) | 92 f.write('%s %s\n'%(aid, urls[aid])) |
| 92 f.close() | 93 f.close() |
| 93 url = urls[aid] + '&file_id=' + fid | 94 url = urls[aid] + '&file_id=' + fid |
| 94 f = urllib2.urlopen(url) | 95 f = urllib_.urlopen(url) |
| 95 data = f.read() | 96 data = f.read() |
| 96 n = open(os.path.join(file_dir, fid), 'w') | 97 n = open(os.path.join(file_dir, fid), 'w') |
| 97 n.write(data) | 98 n.write(data) |
| 98 f.close() | 99 f.close() |
| 99 n.close() | 100 n.close() |
