Rietveld Code Review Tool

Delta Between Two Patch Sets: run.py

Issue 29324625: Issue 2951 - Fix handling of URLs without a scheme (Closed)
Left Patch Set: Created Aug. 25, 2015, 8:25 p.m.
Right Patch Set: Created Oct. 9, 2015, 10:26 a.m.
#!/usr/bin/env python
# coding: utf-8

import argparse
import datetime
import errno
import hashlib
import io
import json
import os
(...skipping 28 matching lines...)
elif path == '/save':
  try:
    request_body_size = int(environ.get('CONTENT_LENGTH', 0))
  except (ValueError):
    start_response('400 Bad Request', [])
    return ''

  data = json.loads(environ['wsgi.input'].read(request_body_size))
  self.urls.remove(data['url'])

-  fullurl = data['url'] if ':' in data['url'] else 'http://' + data['url']

Sebastian Noack 2015/08/26 13:11:52: Please don't repeat yourself: fullurl = data['url'] ...
Wladimir Palant 2015/10/09 10:27:34: Done.

+  fullurl = data['url']
+  if not urlparse.urlparse(fullurl).scheme:
+    fullurl = 'http://' + fullurl
  parsedurl = urlparse.urlparse(fullurl)
  urlhash = hashlib.new('md5', data['url']).hexdigest()
  timestamp = datetime.datetime.fromtimestamp(data['startTime'] / 1000.0).strftime('%Y-%m-%dT%H%M%S.%f')
  basename = "%s-%s-%s" % (parsedurl.hostname, timestamp, urlhash)
  datapath = os.path.join(self.parameters.outdir, basename + ".json")
  screenshotpath = os.path.join(self.parameters.outdir, basename + ".jpg")
  sourcepath = os.path.join(self.parameters.outdir, basename + ".xml")

  try:
    os.makedirs(self.parameters.outdir)
(...skipping 127 matching lines...)
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DEPENDENCY_SCRIPT = os.path.join(BASE_DIR, "ensure_dependencies.py")

try:
  subprocess.check_call([sys.executable, DEPENDENCY_SCRIPT, BASE_DIR])
except subprocess.CalledProcessError as e:
  print >>sys.stderr, e
  print >>sys.stderr, "Failed to ensure dependencies being up-to-date!"

run()
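
The point of the new patch set is that it asks urlparse whether a scheme is present instead of merely testing for a colon, so host:port URLs without a scheme also get "http://" prepended. Below is a minimal standalone sketch of that logic, assuming Python 2.7 (the interpreter the file targets, given its print >> syntax and urlparse import); the helper name ensure_scheme and the sample URLs are made up for illustration and are not part of the patch.

# Minimal sketch, not part of the patch: ensure_scheme and the sample URLs
# are invented for illustration only.
import urlparse


def ensure_scheme(url):
  # Mirror the logic of the new patch set: only prepend "http://" when
  # urlparse does not recognise a scheme.
  if not urlparse.urlparse(url).scheme:
    url = 'http://' + url
  return url


if __name__ == '__main__':
  # No colon and no scheme: both the old and the new check add "http://".
  print ensure_scheme('example.com')         # http://example.com
  # A scheme is already present: the URL is left untouched.
  print ensure_scheme('http://example.com')  # http://example.com
  # The old ':' in url test skipped this host:port URL; Python 2.7's urlparse
  # treats the digits after the colon as a port and reports an empty scheme,
  # so the new check still adds "http://".
  print ensure_scheme('localhost:8080')      # http://localhost:8080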
