OLD | NEW |
1 # This Source Code Form is subject to the terms of the Mozilla Public | 1 # This Source Code Form is subject to the terms of the Mozilla Public |
2 # License, v. 2.0. If a copy of the MPL was not distributed with this | 2 # License, v. 2.0. If a copy of the MPL was not distributed with this |
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. | 3 # file, You can obtain one at http://mozilla.org/MPL/2.0/. |
4 | 4 |
5 import errno | 5 import errno |
6 import io | 6 import io |
7 import json | 7 import json |
8 import os | 8 import os |
9 import re | 9 import re |
10 import shutil | |
11 from StringIO import StringIO | 10 from StringIO import StringIO |
12 import struct | 11 import struct |
13 import subprocess | 12 import subprocess |
14 import sys | 13 import sys |
15 import tempfile | |
16 | 14 |
17 from ensure_dependencies import read_deps | 15 from ensure_dependencies import read_deps |
18 from packager import (readMetadata, getDefaultFileName, getBuildVersion, | 16 from packager import (readMetadata, getDefaultFileName, getBuildVersion, |
19 getTemplate, Files) | 17 getTemplate, Files) |
20 | 18 |
21 defaultLocale = 'en_US' | 19 defaultLocale = 'en_US' |
22 | 20 |
23 | 21 |
24 def getIgnoredFiles(params): | 22 def getIgnoredFiles(params): |
25 return {'store.description'} | 23 return {'store.description'} |
(...skipping 109 matching lines...)
135 # Normalize JSON structure | 133 # Normalize JSON structure |
136 licenseComment = re.compile(r'/\*.*?\*/', re.S) | 134 licenseComment = re.compile(r'/\*.*?\*/', re.S) |
137 data = json.loads(re.sub(licenseComment, '', manifest, 1)) | 135 data = json.loads(re.sub(licenseComment, '', manifest, 1)) |
138 if '_dummy' in data: | 136 if '_dummy' in data: |
139 del data['_dummy'] | 137 del data['_dummy'] |
140 manifest = json.dumps(data, sort_keys=True, indent=2) | 138 manifest = json.dumps(data, sort_keys=True, indent=2) |
141 | 139 |
142 return manifest.encode('utf-8') | 140 return manifest.encode('utf-8') |
143 | 141 |
144 | 142 |
| 143 def toJson(data): |
| 144 return json.dumps( |
| 145 data, ensure_ascii=False, sort_keys=True, |
| 146 indent=2, separators=(',', ': ') |
| 147 ).encode('utf-8') + '\n' |
| 148 |
| 149 |
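The new toJson helper centralizes serialization of generated JSON files: sorted keys
and two-space indentation keep the output deterministic, separators=(',', ': ') avoids
the trailing spaces Python 2's json.dumps otherwise emits after commas when indent is
set, and the result is UTF-8 encoded with a trailing newline. A minimal sketch of what
it produces, using a made-up payload:

    # Hypothetical input; only illustrates the formatting toJson applies.
    data = {'version': '1.0', 'name': 'Example'}
    print(toJson(data))
    # {
    #   "name": "Example",
    #   "version": "1.0"
    # }
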
145 def create_bundles(params, files): | 150 def create_bundles(params, files): |
146 base_extension_path = params['baseDir'] | 151 base_extension_path = params['baseDir'] |
147 info_templates = { | 152 info_templates = { |
148 'chrome': 'chromeInfo.js.tmpl', | 153 'chrome': 'chromeInfo.js.tmpl', |
149 'edge': 'edgeInfo.js.tmpl', | 154 'edge': 'edgeInfo.js.tmpl', |
150 'gecko-webext': 'geckoInfo.js.tmpl' | 155 'gecko-webext': 'geckoInfo.js.tmpl' |
151 } | 156 } |
152 info_module = None | |
153 | 157 |
154 # Historically we didn't use relative paths when requiring modules, so in | 158 # Historically we didn't use relative paths when requiring modules, so in |
155 # order for webpack to know where to find them we need to pass in a list of | 159 # order for webpack to know where to find them we need to pass in a list of |
156 # resolve paths. Going forward we should always use relative paths; once we | 160 # resolve paths. Going forward we should always use relative paths; once we
157 # do that consistently, this can be removed. See issues 5760, 5761 and 5762. | 161 # do that consistently, this can be removed. See issues 5760, 5761 and 5762.
158 resolve_paths = ' '.join( | 162 resolve_paths = [os.path.join(base_extension_path, dir, 'lib') |
159 [os.path.join(base_extension_path, dir, 'lib') | 163 for dir in ['', 'adblockpluscore', 'adblockplusui']] |
160 for dir in ['', 'adblockpluscore', 'adblockplusui']] | |
161 ) | |
162 | 164 |
163 temp_dir = tempfile.mkdtemp() | 165 info_template = getTemplate(info_templates[params['type']]) |
164 try: | 166 info_module = info_template.render( |
165 info_module = os.path.join(temp_dir, 'info.js') | 167 basename=params['metadata'].get('general', 'basename'), |
166 template = getTemplate(info_templates[params['type']]) | 168 version=params['metadata'].get('general', 'version') |
167 with open(info_module, 'w') as info_file: | 169 ).encode('utf-8') |
168 info_file.write( | |
169 template.render( | |
170 basename=params['metadata'].get('general', 'basename'), | |
171 version=params['metadata'].get('general', 'version') | |
172 ).encode('utf-8') | |
173 ) | |
174 | 170 |
175 for item in params['metadata'].items('bundles'): | 171 boundary = '=============================WEBPACK-BOUNDARY' |
176 name, value = item | 172 configuration = { |
177 base_item_path = os.path.dirname(item.source) | 173 'BOUNDARY': boundary, |
| 174 'BUNDLES': [], |
| 175 'EXTENSION_PATH': base_extension_path, |
| 176 'INFO_MODULE': info_module, |
| 177 'RESOLVE_PATHS': resolve_paths, |
| 178 } |
178 | 179 |
179 bundle_file = os.path.relpath(os.path.join(base_item_path, name), | 180 for item in params['metadata'].items('bundles'): |
180 base_extension_path) | 181 name, value = item |
181 entry_files = [os.path.join(base_item_path, module_path) | 182 base_item_path = os.path.dirname(item.source) |
182 for module_path in value.split()] | |
183 subprocess.check_call( | |
184 ['npm', 'run-script', 'webpack', '--silent'], | |
185 cwd=os.path.dirname(__file__), | |
186 env={ | |
187 'EXTENSION_PATH': base_extension_path, | |
188 'ENTRY_POINTS': ' '.join(entry_files), | |
189 'OUTPUT_PATH': temp_dir, | |
190 'BUNDLE_NAME': bundle_file, | |
191 'RESOLVE_PATHS': resolve_paths, | |
192 'INFO_PATH': info_module, | |
193 'PATH': os.environ['PATH'] | |
194 } | |
195 ) | |
196 for file_name in [bundle_file, bundle_file + '.map']: | |
197 with open(os.path.join(temp_dir, file_name), 'r') as f: | |
198 files[file_name] = f.read() | |
199 finally: | |
200 shutil.rmtree(temp_dir) | |
201 | 183 |
| 184 bundle_file = os.path.relpath(os.path.join(base_item_path, name), |
| 185 base_extension_path) |
| 186 entry_files = [os.path.join(base_item_path, module_path) |
| 187 for module_path in value.split()] |
| 188 configuration['BUNDLES'].append({ |
| 189 'BUNDLE_NAME': bundle_file, |
| 190 'ENTRY_POINTS': entry_files, |
| 191 }) |
202 | 192 |
203 def toJson(data): | 193 output = subprocess.check_output( |
204 return json.dumps( | 194 ['node', |
205 data, ensure_ascii=False, sort_keys=True, | 195 os.path.join(os.path.dirname(__file__), 'webpack_runner.js'), |
206 indent=2, separators=(',', ': ') | 196 toJson(configuration)] |
207 ).encode('utf-8') + '\n' | 197 ).split(boundary) |
| 198 for i in range(0, len(output)-1, 2): |
| 199 files[output[i].strip()] = output[i+1].strip() |
208 | 200 |
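The reworked create_bundles no longer invokes 'npm run-script webpack' once per bundle
with its parameters spread across environment variables; it now builds a single
configuration object, serializes it with toJson and passes it as an argument to
webpack_runner.js, then reads every generated file back from the child's stdout. The
parsing loop above assumes the child alternates file names and file contents, each
field delimited by the BOUNDARY marker. A self-contained sketch of that contract (the
sample bundle names and contents below are fabricated for illustration):

    boundary = '=============================WEBPACK-BOUNDARY'
    # Assumed child output layout: name, contents, name, contents, ...
    sample_output = boundary.join([
        'lib/adblockplus.js', 'console.log("bundled code");',
        'lib/adblockplus.js.map', '{"version": 3, "sources": []}',
    ])
    files = {}
    parts = sample_output.split(boundary)
    for i in range(0, len(parts) - 1, 2):
        files[parts[i].strip()] = parts[i + 1].strip()
    assert sorted(files) == ['lib/adblockplus.js', 'lib/adblockplus.js.map']
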
209 | 201 |
210 def import_string_webext(data, key, source): | 202 def import_string_webext(data, key, source): |
211 """Import a single translation from the source dictionary into data""" | 203 """Import a single translation from the source dictionary into data""" |
212 data[key] = source | 204 data[key] = source |
213 | 205 |
214 | 206 |
215 def import_string_gecko(data, key, value): | 207 def import_string_gecko(data, key, value): |
216 """Import Gecko-style locales into data. | 208 """Import Gecko-style locales into data. |
217 | 209 |
(...skipping 215 matching lines...)
433 params, 'testIndex.html.tmpl', ('general', 'testScripts') | 425 params, 'testIndex.html.tmpl', ('general', 'testScripts') |
434 ) | 426 ) |
435 | 427 |
436 zipdata = files.zipToString() | 428 zipdata = files.zipToString() |
437 signature = None | 429 signature = None |
438 pubkey = None | 430 pubkey = None |
439 if keyFile != None: | 431 if keyFile != None: |
440 signature = signBinary(zipdata, keyFile) | 432 signature = signBinary(zipdata, keyFile) |
441 pubkey = getPublicKey(keyFile) | 433 pubkey = getPublicKey(keyFile) |
442 writePackage(outFile, pubkey, signature, zipdata) | 434 writePackage(outFile, pubkey, signature, zipdata) |
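
The unchanged tail of the build step zips the collected files, signs the archive when a
key file was supplied, and hands everything to writePackage. writePackage itself is not
part of this diff; assuming the package uses the Chrome CRX2 container layout (the
'Cr24' magic, a format version, the key and signature lengths, then key, signature and
zip payload), the layout can be sketched as follows. This illustrates the container
format under that assumption, not the function's actual body:

    import struct

    def write_crx2(out_file, pubkey, signature, zipdata):
        # CRX2 header: magic, version 2, public key length, signature length
        # (integers little-endian), followed by the key, the signature and
        # the zip archive itself.
        out_file.write(struct.pack('<4sIII', 'Cr24', 2,
                                   len(pubkey), len(signature)))
        out_file.write(pubkey)
        out_file.write(signature)
        out_file.write(zipdata)

When no key file is given, pubkey and signature stay None, so presumably only the plain
zip data ends up in the output file in that case.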