 @functools.total_ordering
 class S3Object:
     key: str
-    checksum: str | None
+    checksum: Optional[str]

     def __str__(self):
         return self.key
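For reference, here is a minimal sketch of the container the rest of the diff manipulates. The bare field annotations suggest a dataclass, but only the fields and `__str__` appear above, so the decorator arguments and the `__lt__` below are assumptions, not part of the PR. Because `__str__` returns `key`, the `str(obj)` to `obj.key` replacements in the following hunks are behavior-preserving, and `functools.total_ordering` is what lets `sorted(self.gen_file_list(...))` order objects from a single comparison method.

import functools
from dataclasses import dataclass
from typing import Optional

@functools.total_ordering
@dataclass(frozen=True)  # frozen=True is an assumption; it supplies __hash__ so objects can live in sets
class S3Object:
    key: str
    checksum: Optional[str]

    def __str__(self) -> str:
        return self.key

    def __lt__(self, other: "S3Object") -> bool:
        # Assumed ordering by key; total_ordering derives __le__/__gt__/__ge__ from this.
        return self.key < other.key

obj = S3Object(key="whl/torch-2.0.0%2Bcpu-cp310-none-linux_x86_64.whl", checksum=None)
assert str(obj) == obj.key  # str(obj) and obj.key are interchangeable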
@@ -150,7 +150,7 @@ def __init__(self: S3IndexType, objects: List[S3Object], prefix: str) -> None:
         # should dynamically grab subdirectories like whl/test/cu101
         # so we don't need to add them manually anymore
         self.subdirs = {
-            path.dirname(obj) for obj in objects if path.dirname != prefix
+            path.dirname(obj.key) for obj in objects if path.dirname != prefix
         }

     def nightly_packages_to_show(self: S3IndexType) -> Set[S3Object]:
@@ -194,7 +194,7 @@ def nightly_packages_to_show(self: S3IndexType) -> Set[S3Object]:
         })

     def is_obj_at_root(self, obj: S3Object) -> bool:
-        return path.dirname(str(obj)) == self.prefix
+        return path.dirname(obj.key) == self.prefix

     def _resolve_subdir(self, subdir: Optional[str] = None) -> str:
         if not subdir:
@@ -216,7 +216,7 @@ def gen_file_list(
             if package_name is not None:
                 if self.obj_to_package_name(obj) != package_name:
                     continue
-            if self.is_obj_at_root(obj) or str(obj).startswith(subdir):
+            if self.is_obj_at_root(obj) or obj.key.startswith(subdir):
                 yield obj

     def get_package_names(self, subdir: Optional[str] = None) -> List[str]:
@@ -228,11 +228,11 @@ def normalize_package_version(self: S3IndexType, obj: S3Object) -> str:
         return sub(
             r"%2B.*",
             "",
-            "-".join(path.basename(str(obj)).split("-")[:2])
+            "-".join(path.basename(obj.key).split("-")[:2])
         )

     def obj_to_package_name(self, obj: S3Object) -> str:
-        return path.basename(str(obj)).split('-', 1)[0]
+        return path.basename(obj.key).split('-', 1)[0]

     def to_legacy_html(
         self,
@@ -250,15 +250,15 @@ def to_legacy_html(
         is_root = subdir == self.prefix
         for obj in self.gen_file_list(subdir):
             # Strip our prefix
-            sanitized_obj = obj.replace(subdir, "", 1)
+            sanitized_obj = obj.key.replace(subdir, "", 1)
             if sanitized_obj.startswith('/'):
                 sanitized_obj = sanitized_obj.lstrip("/")
             # we include objects at our root prefix so that users can still
             # install packages like torchaudio / torchtext even if they want
             # to install a specific GPU arch of torch / torchvision
             if not is_root and self.is_obj_at_root(obj):
                 # strip root prefix
-                sanitized_obj = obj.replace(self.prefix, "", 1).lstrip("/")
+                sanitized_obj = obj.key.replace(self.prefix, "", 1).lstrip("/")
                 sanitized_obj = f"../{sanitized_obj}"
             out.append(f'<a href="{sanitized_obj}">{sanitized_obj}</a><br/>')
         return "\n".join(sorted(out))
@@ -278,7 +278,7 @@ def to_simple_package_html(
         out.append('  <h1>Links for {}</h1>'.format(package_name.lower().replace("_","-")))
         for obj in sorted(self.gen_file_list(subdir, package_name)):
             maybe_fragment = f"#sha256={obj.checksum}" if obj.checksum else ""
-            out.append(f'  <a href="/{obj}{maybe_fragment}">{path.basename(obj).replace("%2B","+")}</a><br/>')
+            out.append(f'  <a href="/{obj.key}{maybe_fragment}">{path.basename(obj.key).replace("%2B","+")}</a><br/>')
         # Adding html footer
         out.append('  </body>')
         out.append('</html>')
@@ -369,19 +369,20 @@ def from_S3(cls: Type[S3IndexType], prefix: str) -> S3IndexType:
                 )
                 for pattern in ACCEPTED_SUBDIR_PATTERNS
             ]) and obj.key.endswith(ACCEPTED_FILE_EXTENSIONS)
-            if is_acceptable:
-                # Add PEP 503-compatible hashes to URLs to allow clients to avoid spurious downloads, if possible.
-                response = obj.meta.client.head_object(Bucket=BUCKET.name, Key=obj.key, ChecksumMode="ENABLED")
-                sha256 = (_b64 := response.get("ChecksumSHA256")) and base64.b64decode(_b64).hex()
-                # For older files, rely on checksumsha256 metadata that can be added to the file later
-                if sha256 is None:
-                    sha256 = response.get("Metadata", {}).get("checksumsha256")
-                sanitized_key = obj.key.replace("+", "%2B")
-                s3_object = S3Object(
-                    key=sanitized_key,
-                    checksum=sha256,
-                )
-                objects.append(s3_object)
+            if not is_acceptable:
+                continue
+            # Add PEP 503-compatible hashes to URLs to allow clients to avoid spurious downloads, if possible.
+            response = obj.meta.client.head_object(Bucket=BUCKET.name, Key=obj.key, ChecksumMode="ENABLED")
+            sha256 = (_b64 := response.get("ChecksumSHA256")) and base64.b64decode(_b64).hex()
+            # For older files, rely on checksum-sha256 metadata that can be added to the file later
+            if sha256 is None:
+                sha256 = response.get("Metadata", {}).get("checksum-sha256")
+            sanitized_key = obj.key.replace("+", "%2B")
+            s3_object = S3Object(
+                key=sanitized_key,
+                checksum=sha256,
+            )
+            objects.append(s3_object)
         return cls(objects, prefix)


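The substantive addition is the checksum lookup: `head_object(..., ChecksumMode="ENABLED")` reports the object's SHA-256 in `ChecksumSHA256` as a base64-encoded digest, while the PEP 503 `#sha256=` fragment emitted by `to_simple_package_html` needs a hex digest, which is why the diff converts with `base64.b64decode(_b64).hex()` (the walrus expression evaluates to `None` when S3 has no stored checksum, falling through to the metadata lookup). A minimal, self-contained sketch of that conversion; `payload` is a made-up stand-in, since in the PR the digest comes from the S3 response rather than from hashing locally:

import base64
import hashlib

payload = b"example wheel contents"          # hypothetical wheel bytes
digest = hashlib.sha256(payload).digest()

b64_form = base64.b64encode(digest).decode() # shape of response["ChecksumSHA256"]
hex_form = base64.b64decode(b64_form).hex()  # shape pip expects after #sha256=

assert hex_form == hashlib.sha256(payload).hexdigest()
print(f"#sha256={hex_form}")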