Skip to content

Commit 4e88af9

Browse files
mrveiss and claude
committed
fix: Correct Python syntax errors in 5 source files (#550)
- Fix indentation error in analyze_code_vectors_for_issues.py try/except block - Fix mixed f-string/%-style logging syntax in 4 files: - advanced_web_research.py: wait_time:.2f → %.2f format specifier - adaptive_timeouts.py: elapsed:.2f → %.2f format specifier - graceful_degradation.py: confidence:.2f → %.2f format specifier - memory_optimization.py: count:, → f"{count:,}" for thousand separators All files now pass python -m py_compile verification. Closes #550 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1 parent c743ff7 commit 4e88af9

File tree

5 files changed

+20
-20
lines changed

5 files changed

+20
-20
lines changed

analysis/analyze_code_vectors_for_issues.py

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -261,21 +261,21 @@ async def find_hardcoded_values(self, code_samples: List[Dict[str, Any]]) -> Dic
261261
matches = compiled_pattern.findall(text)
262262

263263
for match in matches:
264-
# Extract the actual value (handle tuple results from groups)
265-
value = match[0] if isinstance(match, tuple) and len(match) > 0 else match
266-
267-
# Skip common false positives
268-
if self.is_likely_hardcoded_value(category, value):
269-
hardcoded[category].append({
270-
"value": value,
271-
"file": doc_id,
272-
"context": text[max(0, text.find(value)-50):text.find(value)+100],
273-
"severity": self.assess_hardcode_severity(category, value)
274-
})
275-
276-
except Exception as e:
277-
logger.warning(f"⚠️ Pattern matching failed for {category}: {e}")
278-
continue
264+
# Extract the actual value (handle tuple results from groups)
265+
value = match[0] if isinstance(match, tuple) and len(match) > 0 else match
266+
267+
# Skip common false positives
268+
if self.is_likely_hardcoded_value(category, value):
269+
hardcoded[category].append({
270+
"value": value,
271+
"file": doc_id,
272+
"context": text[max(0, text.find(value)-50):text.find(value)+100],
273+
"severity": self.assess_hardcode_severity(category, value)
274+
})
275+
276+
except Exception as e:
277+
logger.warning(f"⚠️ Pattern matching failed for {category}: {e}")
278+
continue
279279

280280
# Remove duplicates and sort by severity
281281
for category in hardcoded:

src/agents/advanced_web_research.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -833,7 +833,7 @@ async def _respect_rate_limit(self, domain: str):
833833

834834
# Wait outside lock if needed
835835
if wait_time > 0:
836-
logger.info("Rate limiting: waiting %ss for %s", wait_time:.2f, domain)
836+
logger.info("Rate limiting: waiting %.2fs for %s", wait_time, domain)
837837
await asyncio.sleep(wait_time)
838838

839839
async def _random_delay(self, min_seconds: float, max_seconds: float):

src/utils/adaptive_timeouts.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -174,7 +174,7 @@ async def execute_with_intelligent_timeout(
174174

175175
except Exception as e:
176176
elapsed = time.time() - self.start_time
177-
logger.error("Operation failed after %ss: %s", elapsed:.2f, e)
177+
logger.error("Operation failed after %.2fs: %s", elapsed, e)
178178
return fallback_result
179179

180180
async def _send_timeout_warning(

src/utils/graceful_degradation.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -698,7 +698,7 @@ async def main():
698698
response = await manager.handle_request(request)
699699
logger.debug("\nRequest %s: %s", i+1, request)
700700
logger.debug("Response: %s...", response.content[:100])
701-
logger.debug("Source: %s, Confidence: %s", response.source, response.confidence:.2f)
701+
logger.debug("Source: %s, Confidence: %.2f", response.source, response.confidence)
702702
logger.debug("Degradation Level: %s", response.degradation_level.name)
703703

704704
# Simulate some delay between requests

src/utils/memory_optimization.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -405,12 +405,12 @@ def optimize_memory_usage():
405405
object_counts[obj_type] = object_counts.get(obj_type, 0) + 1
406406

407407
total_objects = len(gc.get_objects())
408-
logger.info("Current object count: %s total objects", total_objects:,)
408+
logger.info("Current object count: %s total objects", f"{total_objects:,}")
409409

410410
# Log top 5 object types
411411
top_objects = sorted(object_counts.items(), key=lambda x: x[1], reverse=True)[:5]
412412
for obj_type, count in top_objects:
413-
logger.info(" %s: %s instances", obj_type, count:,)
413+
logger.info(" %s: %s instances", obj_type, f"{count:,}")
414414

415415
return {
416416
"objects_collected": collected,

0 commit comments

Comments (0)