Skip to content

Commit 204b53e

Browse files
build: upload to AZ as well as S3 (electron#33573)
* build: upload to AZ aswell as S3 * fix: provide env to azput
1 parent 0ac6d74 commit 204b53e

File tree

10 files changed

+345
-93
lines changed

10 files changed

+345
-93
lines changed

package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
"repository": "https://github.com/electron/electron",
55
"description": "Build cross platform desktop apps with JavaScript, HTML, and CSS",
66
"devDependencies": {
7+
"@azure/storage-blob": "^12.9.0",
78
"@electron/docs-parser": "^0.12.4",
89
"@electron/typescript-definitions": "^8.9.5",
910
"@octokit/auth-app": "^2.10.0",
@@ -141,4 +142,4 @@
141142
"node script/gen-hunspell-filenames.js"
142143
]
143144
}
144-
}
145+
}

script/lib/azput.js

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
/* eslint-disable camelcase */
const { BlobServiceClient } = require('@azure/storage-blob');
const path = require('path');

// Uploads the files named on the command line to Azure Blob Storage.
// Mirrors s3put's CLI (--prefix, --key_prefix, positional files) so the
// python callers can invoke both stores with identical arguments.

// Fail fast with a clear message instead of letting the SDK throw an
// opaque error on an undefined connection string.
const connectionString = process.env.ELECTRON_ARTIFACTS_BLOB_STORAGE;
if (!connectionString) {
  console.error('Missing $ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable');
  process.exit(1);
}

const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString);

const args = require('minimist')(process.argv.slice(2));

let { prefix = '/', key_prefix = '', _: files } = args;
if (prefix && !prefix.endsWith(path.sep)) prefix = path.resolve(prefix) + path.sep;

// Turns a local file path into a blob key: strips the local --prefix and
// normalizes Windows backslashes to forward slashes.
function filenameToKey (file) {
  file = path.resolve(file);
  if (file.startsWith(prefix)) file = file.substr(prefix.length - 1);
  return key_prefix + (path.sep === '\\' ? file.replace(/\\/g, '/') : file);
}

let anErrorOccurred = false;
// Uploads one queued file then recurses; calls done() once the queue is
// drained. Failures are logged and remembered (not thrown) so every file
// still gets an upload attempt.
function next (done) {
  const file = files.shift();
  if (!file) return done();
  let key = filenameToKey(file);
  // TODO: When we drop s3put, migrate the key to not include atom-shell in the callsites
  key = key.replace('atom-shell/dist/', 'headers/dist/');
  key = key.replace('atom-shell/symbols/', 'symbols/');
  key = key.replace('atom-shell/tmp/', 'checksums-scratchpad/');
  key = key.replace('electron-artifacts/', 'release-builds/');

  // First path segment selects the storage container; the remainder is
  // the blob name within it.
  const [containerName, ...keyPath] = key.split('/');
  const blobKey = keyPath.join('/');
  console.log(`Uploading '${file}' to container '${containerName}' with key '${blobKey}'...`);

  const containerClient = blobServiceClient.getContainerClient(containerName);
  const blockBlobClient = containerClient.getBlockBlobClient(blobKey);
  blockBlobClient.uploadFile(file)
    .then((uploadBlobResponse) => {
      console.log(`Upload block blob ${blobKey} successfully: https://artifacts.electronjs.org/${key}`, uploadBlobResponse.requestId);
    })
    .catch((err) => {
      console.error(err);
      anErrorOccurred = true;
    })
    .then(() => next(done));
}
next(() => {
  // Non-zero exit tells the python wrapper (azput in util.py) that at
  // least one upload failed.
  process.exit(anErrorOccurred ? 1 : 0);
});

script/lib/util.py

Lines changed: 19 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
from urllib2 import urlopen
1616
import zipfile
1717

18-
from lib.config import is_verbose_mode
18+
from lib.config import is_verbose_mode, s3_config
1919

2020
ELECTRON_DIR = os.path.abspath(
2121
os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
@@ -155,7 +155,14 @@ def get_electron_version():
155155
with open(version_file) as f:
156156
return 'v' + f.read().strip()
157157

158-
def s3put(bucket, access_key, secret_key, prefix, key_prefix, files):
158+
def store_artifact(prefix, key_prefix, files):
159+
# Legacy S3 Bucket
160+
s3put(prefix, key_prefix, files)
161+
# New AZ Storage
162+
azput(prefix, key_prefix, files)
163+
164+
def s3put(prefix, key_prefix, files):
165+
bucket, access_key, secret_key = s3_config()
159166
env = os.environ.copy()
160167
env['AWS_ACCESS_KEY_ID'] = access_key
161168
env['AWS_SECRET_ACCESS_KEY'] = secret_key
@@ -169,6 +176,16 @@ def s3put(bucket, access_key, secret_key, prefix, key_prefix, files):
169176
] + files, env)
170177
print(output)
171178

179+
def azput(prefix, key_prefix, files):
180+
env = os.environ.copy()
181+
output = execute([
182+
'node',
183+
os.path.join(os.path.dirname(__file__), 'azput.js'),
184+
'--prefix', prefix,
185+
'--key_prefix', key_prefix,
186+
] + files, env)
187+
print(output)
188+
172189
def get_out_dir():
173190
out_dir = 'Debug'
174191
override = os.environ.get('ELECTRON_OUT_DIR')

script/release/release.js

Lines changed: 47 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@ const pkgVersion = `v${pkg.version}`;
1717
const path = require('path');
1818
const temp = require('temp').track();
1919
const { URL } = require('url');
20+
const { BlobServiceClient } = require('@azure/storage-blob');
2021
const { Octokit } = require('@octokit/rest');
21-
const AWS = require('aws-sdk');
2222

2323
require('colors');
2424
const pass = '✓'.green;
@@ -80,6 +80,8 @@ async function validateReleaseAssets (release, validatingRelease) {
8080
}
8181
const s3RemoteFiles = s3RemoteFilesForVersion(release.tag_name);
8282
await verifyShasumsForRemoteFiles(s3RemoteFiles, true);
83+
const azRemoteFiles = azRemoteFilesForVersion(release.tag_name);
84+
await verifyShasumsForRemoteFiles(azRemoteFiles, true);
8385
}
8486
}
8587

@@ -181,26 +183,36 @@ function assetsForVersion (version, validatingRelease) {
181183
return patterns;
182184
}
183185

186+
const cloudStoreFilePaths = (version) => [
187+
`iojs-${version}-headers.tar.gz`,
188+
`iojs-${version}.tar.gz`,
189+
`node-${version}.tar.gz`,
190+
'node.lib',
191+
'x64/node.lib',
192+
'win-x64/iojs.lib',
193+
'win-x86/iojs.lib',
194+
'win-arm64/iojs.lib',
195+
'win-x64/node.lib',
196+
'win-x86/node.lib',
197+
'win-arm64/node.lib',
198+
'arm64/node.lib',
199+
'SHASUMS.txt',
200+
'SHASUMS256.txt'
201+
];
202+
184203
function s3RemoteFilesForVersion (version) {
185204
const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/';
186205
const versionPrefix = `${bucket}atom-shell/dist/${version}/`;
187-
const filePaths = [
188-
`iojs-${version}-headers.tar.gz`,
189-
`iojs-${version}.tar.gz`,
190-
`node-${version}.tar.gz`,
191-
'node.lib',
192-
'x64/node.lib',
193-
'win-x64/iojs.lib',
194-
'win-x86/iojs.lib',
195-
'win-arm64/iojs.lib',
196-
'win-x64/node.lib',
197-
'win-x86/node.lib',
198-
'win-arm64/node.lib',
199-
'arm64/node.lib',
200-
'SHASUMS.txt',
201-
'SHASUMS256.txt'
202-
];
203-
return filePaths.map((filePath) => ({
206+
return cloudStoreFilePaths(version).map((filePath) => ({
207+
file: filePath,
208+
url: `${versionPrefix}${filePath}`
209+
}));
210+
}
211+
212+
function azRemoteFilesForVersion (version) {
213+
const azCDN = 'https://artifacts.electronjs.org/headers/';
214+
const versionPrefix = `${azCDN}dist/${version}/`;
215+
return cloudStoreFilePaths(version).map((filePath) => ({
204216
file: filePath,
205217
url: `${versionPrefix}${filePath}`
206218
}));
@@ -221,49 +233,39 @@ function runScript (scriptName, scriptArgs, cwd) {
221233
}
222234

223235
function uploadNodeShasums () {
224-
console.log('Uploading Node SHASUMS file to S3.');
236+
console.log('Uploading Node SHASUMS file to artifacts.electronjs.org.');
225237
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py');
226238
runScript(scriptPath, ['-v', pkgVersion]);
227-
console.log(`${pass} Done uploading Node SHASUMS file to S3.`);
239+
console.log(`${pass} Done uploading Node SHASUMS file to artifacts.electronjs.org.`);
228240
}
229241

230242
function uploadIndexJson () {
231-
console.log('Uploading index.json to S3.');
243+
console.log('Uploading index.json to artifacts.electronjs.org.');
232244
const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py');
233245
runScript(scriptPath, [pkgVersion]);
234-
console.log(`${pass} Done uploading index.json to S3.`);
246+
console.log(`${pass} Done uploading index.json to artifacts.electronjs.org.`);
235247
}
236248

237249
async function mergeShasums (pkgVersion) {
238-
// Download individual checksum files for Electron zip files from S3,
250+
// Download individual checksum files for Electron zip files from artifact storage,
239251
// concatenate them, and upload to GitHub.
240252

241-
const bucket = process.env.ELECTRON_S3_BUCKET;
242-
const accessKeyId = process.env.ELECTRON_S3_ACCESS_KEY;
243-
const secretAccessKey = process.env.ELECTRON_S3_SECRET_KEY;
244-
if (!bucket || !accessKeyId || !secretAccessKey) {
245-
throw new Error('Please set the $ELECTRON_S3_BUCKET, $ELECTRON_S3_ACCESS_KEY, and $ELECTRON_S3_SECRET_KEY environment variables');
253+
const connectionString = process.env.ELECTRON_ARTIFACTS_BLOB_STORAGE;
254+
if (!connectionString) {
255+
throw new Error('Please set the $ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable');
246256
}
247257

248-
const s3 = new AWS.S3({
249-
apiVersion: '2006-03-01',
250-
accessKeyId,
251-
secretAccessKey,
252-
region: 'us-west-2'
258+
const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString);
259+
const containerClient = blobServiceClient.getContainerClient('checksums-scratchpad');
260+
const blobsIter = containerClient.listBlobsFlat({
261+
prefix: `${pkgVersion}/`
253262
});
254-
const objects = await s3.listObjectsV2({
255-
Bucket: bucket,
256-
Prefix: `atom-shell/tmp/${pkgVersion}/`,
257-
Delimiter: '/'
258-
}).promise();
259263
const shasums = [];
260-
for (const obj of objects.Contents) {
261-
if (obj.Key.endsWith('.sha256sum')) {
262-
const data = await s3.getObject({
263-
Bucket: bucket,
264-
Key: obj.Key
265-
}).promise();
266-
shasums.push(data.Body.toString('ascii').trim());
264+
for await (const blob of blobsIter) {
265+
if (blob.name.endsWith('.sha256sum')) {
266+
const blobClient = containerClient.getBlockBlobClient(blob.name);
267+
const response = await blobClient.downloadToBuffer();
268+
shasums.push(response.toString('ascii').trim());
267269
}
268270
}
269271
return shasums.join('\n');

script/release/uploaders/upload-index-json.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,8 @@
99
sys.path.append(
1010
os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
1111

12-
from lib.config import s3_config
13-
from lib.util import s3put, scoped_cwd, safe_mkdir, get_out_dir, ELECTRON_DIR
12+
from lib.util import store_artifact, scoped_cwd, safe_mkdir, get_out_dir, \
13+
ELECTRON_DIR
1414

1515
OUT_DIR = get_out_dir()
1616

@@ -59,9 +59,7 @@ def main():
5959
with open(index_json, "w") as f:
6060
f.write(new_content)
6161

62-
bucket, access_key, secret_key = s3_config()
63-
s3put(bucket, access_key, secret_key, OUT_DIR, 'atom-shell/dist',
64-
[index_json])
62+
store_artifact(OUT_DIR, 'atom-shell/dist', [index_json])
6563

6664

6765
if __name__ == '__main__':

script/release/uploaders/upload-node-checksums.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,7 @@
1010
sys.path.append(
1111
os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
1212

13-
from lib.config import s3_config
14-
from lib.util import download, rm_rf, s3put, safe_mkdir
13+
from lib.util import download, rm_rf, store_artifact, safe_mkdir
1514

1615
DIST_URL = 'https://electronjs.org/headers/'
1716

@@ -30,9 +29,8 @@ def main():
3029
]
3130

3231
if args.target_dir is None:
33-
bucket, access_key, secret_key = s3_config()
34-
s3put(bucket, access_key, secret_key, directory,
35-
'atom-shell/dist/{0}'.format(args.version), checksums)
32+
store_artifact(directory, 'atom-shell/dist/{0}'.format(args.version),
33+
checksums)
3634
else:
3735
copy_files(checksums, args.target_dir)
3836

script/release/uploaders/upload-node-headers.py

Lines changed: 14 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,9 @@
99
sys.path.append(
1010
os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
1111

12-
from lib.config import PLATFORM, get_target_arch, s3_config
13-
from lib.util import safe_mkdir, scoped_cwd, s3put, get_out_dir, get_dist_dir
12+
from lib.config import PLATFORM, get_target_arch
13+
from lib.util import safe_mkdir, scoped_cwd, store_artifact, get_out_dir, \
14+
get_dist_dir
1415

1516
DIST_DIR = get_dist_dir()
1617
OUT_DIR = get_out_dir()
@@ -26,9 +27,8 @@
2627
def main():
2728
args = parse_args()
2829

29-
# Upload node's headers to S3.
30-
bucket, access_key, secret_key = s3_config()
31-
upload_node(bucket, access_key, secret_key, args.version)
30+
# Upload node's headers to artifact storage.
31+
upload_node(args.version)
3232

3333

3434
def parse_args():
@@ -38,17 +38,17 @@ def parse_args():
3838
return parser.parse_args()
3939

4040

41-
def upload_node(bucket, access_key, secret_key, version):
41+
def upload_node(version):
4242
with scoped_cwd(GEN_DIR):
4343
generated_tar = os.path.join(GEN_DIR, 'node_headers.tar.gz')
4444
for header_tar in HEADER_TAR_NAMES:
4545
versioned_header_tar = header_tar.format(version)
4646
shutil.copy2(generated_tar, os.path.join(GEN_DIR, versioned_header_tar))
4747

48-
s3put(bucket, access_key, secret_key, GEN_DIR,
49-
'atom-shell/dist/{0}'.format(version), glob.glob('node-*.tar.gz'))
50-
s3put(bucket, access_key, secret_key, GEN_DIR,
51-
'atom-shell/dist/{0}'.format(version), glob.glob('iojs-*.tar.gz'))
48+
store_artifact(GEN_DIR, 'atom-shell/dist/{0}'.format(version),
49+
glob.glob('node-*.tar.gz'))
50+
store_artifact(GEN_DIR, 'atom-shell/dist/{0}'.format(version),
51+
glob.glob('iojs-*.tar.gz'))
5252

5353
if PLATFORM == 'win32':
5454
if get_target_arch() == 'ia32':
@@ -73,16 +73,14 @@ def upload_node(bucket, access_key, secret_key, version):
7373
shutil.copy2(electron_lib, v4_node_lib)
7474

7575
# Upload the node.lib.
76-
s3put(bucket, access_key, secret_key, DIST_DIR,
77-
'atom-shell/dist/{0}'.format(version), [node_lib])
76+
store_artifact(DIST_DIR, 'atom-shell/dist/{0}'.format(version), [node_lib])
7877

7978
# Upload the iojs.lib.
80-
s3put(bucket, access_key, secret_key, DIST_DIR,
81-
'atom-shell/dist/{0}'.format(version), [iojs_lib])
79+
store_artifact(DIST_DIR, 'atom-shell/dist/{0}'.format(version), [iojs_lib])
8280

8381
# Upload the v4 node.lib.
84-
s3put(bucket, access_key, secret_key, DIST_DIR,
85-
'atom-shell/dist/{0}'.format(version), [v4_node_lib])
82+
store_artifact(DIST_DIR, 'atom-shell/dist/{0}'.format(version),
83+
[v4_node_lib])
8684

8785

8886
if __name__ == '__main__':

script/release/uploaders/upload-symbols.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@ def is_fs_case_sensitive():
1414
sys.path.append(
1515
os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))
1616

17-
from lib.config import PLATFORM, s3_config
18-
from lib.util import get_electron_branding, execute, s3put, \
17+
from lib.config import PLATFORM
18+
from lib.util import get_electron_branding, execute, store_artifact, \
1919
get_out_dir, ELECTRON_DIR
2020

2121
RELEASE_DIR = get_out_dir()
@@ -76,16 +76,15 @@ def main():
7676
for f in files:
7777
assert os.path.exists(f)
7878

79-
bucket, access_key, secret_key = s3_config()
80-
upload_symbols(bucket, access_key, secret_key, files)
79+
upload_symbols(files)
8180

8281

8382
def run_symstore(pdb, dest, product):
8483
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
8584

8685

87-
def upload_symbols(bucket, access_key, secret_key, files):
88-
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols',
86+
def upload_symbols(files):
87+
store_artifact(SYMBOLS_DIR, 'atom-shell/symbols',
8988
files)
9089

9190

0 commit comments

Comments
 (0)