@@ -6,12 +6,13 @@ import os
 import subprocess
 import sys
 from pathlib import Path
-from typing import List, Optional
+from typing import Dict, List, Optional
 
 SCRIPT_PATH = Path(__file__).absolute()
 IMAGE_TYPE = "binary-builder"
 IMAGE_NAME = f"altinityinfra/{IMAGE_TYPE}"
-
+DEFAULT_TMP_PATH = SCRIPT_PATH.parent.absolute() / 'tmp'
+TEMP_PATH = Path(os.getenv("TEMP_PATH", DEFAULT_TMP_PATH))
 
 class BuildException(Exception):
     pass
@@ -82,9 +83,22 @@ def run_docker_image_with_env(
     ch_root: Path,
     cargo_cache_dir: Path,
     ccache_dir: Optional[Path],
+    aws_secrets: Optional[Dict[str, str]]
 ) -> None:
     output_dir.mkdir(parents=True, exist_ok=True)
     cargo_cache_dir.mkdir(parents=True, exist_ok=True)
+    extra_parts = ""
+
+    if aws_secrets:
+        # Pass AWS credentials via file rather than via env to avoid leaking secrets
+        env_part = {"AWS_CONFIG_FILE": "/home/clickhouse/.aws/credentials"}
+        host_aws_config_file_path = Path(TEMP_PATH) / 'aws_config'
+        with open(host_aws_config_file_path, 'wt') as f:
+            f.write("[default]")
+            for key, value in aws_secrets.items():
+                f.write(f"\n{key}={value}")
+
+        extra_parts = f"--volume={host_aws_config_file_path}:{env_part['AWS_CONFIG_FILE']}"
 
     env_part = " -e ".join(env_variables)
     if env_part:
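Note on the hunk above: the secrets are written as an INI-style [default] profile into a file under TEMP_PATH on the host and bind-mounted into the build container at the path announced via AWS_CONFIG_FILE, so they never appear among the -e environment arguments of the docker run command line. A minimal standalone sketch of that approach follows; the staging directory, file name, and secret values are hypothetical placeholders, not taken from the PR.

from pathlib import Path

# Hypothetical staging directory and placeholder secrets (illustration only).
temp_path = Path("/tmp/packager-demo")
temp_path.mkdir(parents=True, exist_ok=True)
secrets = {
    "aws_access_key_id": "EXAMPLEKEY",
    "aws_secret_access_key": "EXAMPLESECRET",
}

# Write a single [default] profile, one key per line, as the hunk above does.
config_path = temp_path / "aws_config"
with open(config_path, "wt") as f:
    f.write("[default]")
    for key, value in secrets.items():
        f.write(f"\n{key}={value}")

# Expose the file to the container; the target path matches the AWS_CONFIG_FILE
# value used in the hunk above.
mount_arg = f"--volume={config_path}:/home/clickhouse/.aws/credentials"
print(mount_arg)

When aws_secrets is None or empty, the guard above leaves extra_parts as an empty string and no credentials file is written.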
@@ -107,6 +121,7 @@ def run_docker_image_with_env(
     cmd = (
         f"docker run --network=host --user={user} --rm {ccache_mount}"
         f"--volume={output_dir}:/output --volume={ch_root}:/build {env_part}"
+        f" {extra_parts} "
         f"--volume={cargo_cache_dir}:/rust/cargo/registry {interactive} {image_name}"
     )
 
@@ -130,11 +145,9 @@ def parse_env_variables(
     sanitizer: str,
     package_type: str,
     cache: str,
-    s3_access_key_id: str,
     s3_bucket: str,
     s3_directory: str,
     s3_rw_access: bool,
-    s3_secret_access_key: str,
     clang_tidy: bool,
     version: str,
     official: bool,
@@ -323,10 +336,6 @@ def parse_env_variables(
         result.append(f"SCCACHE_S3_KEY_PREFIX={sccache_dir}")
         if not s3_rw_access:
             result.append("SCCACHE_S3_NO_CREDENTIALS=true")
-        if s3_access_key_id:
-            result.append(f"AWS_ACCESS_KEY_ID={s3_access_key_id}")
-        if s3_secret_access_key:
-            result.append(f"AWS_SECRET_ACCESS_KEY={s3_secret_access_key}")
 
     if clang_tidy:
         # `CTCACHE_DIR` has the same purpose as the `CCACHE_DIR` above.
@@ -544,11 +553,9 @@ def main() -> None:
         args.sanitizer,
         args.package_type,
         args.cache,
-        args.s3_access_key_id,
         args.s3_bucket,
         args.s3_directory,
         args.s3_rw_access,
-        args.s3_secret_access_key,
         args.clang_tidy,
         args.version,
         args.official,
@@ -567,6 +574,10 @@ def main() -> None:
         ch_root,
         args.cargo_cache_dir,
         args.ccache_dir,
+        {
+            "aws_access_key_id": args.s3_access_key_id,
+            "aws_secret_access_key": args.s3_secret_access_key
+        }
     )
     logging.info("Output placed into %s", args.output_dir)
 