Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 8 additions & 6 deletions src/analysis/alpha050/exp.rs
Original file line number Diff line number Diff line change
Expand Up @@ -330,9 +330,10 @@ pub fn analyze_exp(
.insert(name_span.end..=function_call_scope_end, fun_symbol);
}

let has_failure_handler = failable_handlers
.iter()
.any(|(modifier, _)| matches!(modifier, FailableHandler::Failure(_)));
let has_failure_handler = failable_handlers.iter().any(|(modifier, _)| {
matches!(modifier, FailableHandler::Failure(_))
|| matches!(modifier, FailableHandler::Exited(_, _, _))
});

if matches!(
scoped_generic_types.deref_type(&exp_ty),
Expand Down Expand Up @@ -568,9 +569,10 @@ pub fn analyze_exp(
is_propagating_failure |= is_prop;
return_types.extend(failure_return_ty);

let has_failure_handler = failable_handlers
.iter()
.any(|(modifier, _)| matches!(modifier, FailableHandler::Failure(_)));
let has_failure_handler = failable_handlers.iter().any(|(modifier, _)| {
matches!(modifier, FailableHandler::Failure(_))
|| matches!(modifier, FailableHandler::Exited(_, _, _))
});

if !has_failure_handler
&& !modifiers.iter().any(|(modifier, _)| {
Expand Down
223 changes: 122 additions & 101 deletions src/analysis/alpha050/global.rs
Original file line number Diff line number Diff line change
Expand Up @@ -190,6 +190,105 @@ pub async fn analyze_global_stmnt(
compiler_flags: vec![],
})];

let data_type = match declared_return_ty {
Some((ty, _)) => ty.clone(),
None => {
let generic_id = scoped_generics_map.new_generic_id();
scoped_generics_map.constrain_generic_type(generic_id, DataType::Any);
new_generic_types.push(generic_id);

DataType::Generic(generic_id)
}
};
{
let mut symbol_table = backend
.files
.symbol_table
.entry((file_id, file_version))
.or_default();

insert_symbol_definition(
&mut symbol_table,
&SymbolInfo {
name: name.to_string(),
symbol_type: SymbolType::Function(FunctionSymbol {
arguments: args
.iter()
.enumerate()
.filter_map(|(idx, (arg, span))| match arg {
FunctionArgument::Generic((is_ref, _), (name, _)) => {
Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: DataType::Generic(
new_generic_types[idx],
),
is_optional: false,
is_ref: *is_ref,
},
*span,
))
}
FunctionArgument::Typed(
(is_ref, _),
(name, _),
(ty, _),
) => Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: ty.clone(),
is_optional: false,
is_ref: *is_ref,
},
*span,
)),
FunctionArgument::Optional(
(is_ref, _),
(name, _),
ty,
_,
) => Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: match ty {
Some((ty, _)) => ty.clone(),
None => {
DataType::Generic(new_generic_types[idx])
}
},
is_optional: true,
is_ref: *is_ref,
},
*span,
)),
FunctionArgument::Error => None,
})
.collect::<Vec<_>>(),
Comment on lines +215 to +266
Copy link

Copilot AI Nov 22, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Potential index-out-of-bounds error when accessing new_generic_types[idx]. The variable idx represents the position in the args array, but new_generic_types only contains entries for generic arguments and for optional arguments without explicit types.

For example, with fun foo(a: Num, b), args has 2 elements but new_generic_types has only 1. When processing the argument at index 0 (a: Num), the code would try to access new_generic_types[0] in the Typed branch (line 236), which doesn't make sense since typed arguments don't add to new_generic_types. When processing the argument at index 1 (b), it would try to access new_generic_types[1], which is out of bounds.

The old code used new_generic_types.remove(0), which correctly dequeued generics in order. Consider maintaining a separate counter for generic indices, or reverting to the dequeue approach.

Suggested change
arguments: args
.iter()
.enumerate()
.filter_map(|(idx, (arg, span))| match arg {
FunctionArgument::Generic((is_ref, _), (name, _)) => {
Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: DataType::Generic(
new_generic_types[idx],
),
is_optional: false,
is_ref: *is_ref,
},
*span,
))
}
FunctionArgument::Typed(
(is_ref, _),
(name, _),
(ty, _),
) => Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: ty.clone(),
is_optional: false,
is_ref: *is_ref,
},
*span,
)),
FunctionArgument::Optional(
(is_ref, _),
(name, _),
ty,
_,
) => Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: match ty {
Some((ty, _)) => ty.clone(),
None => {
DataType::Generic(new_generic_types[idx])
}
},
is_optional: true,
is_ref: *is_ref,
},
*span,
)),
FunctionArgument::Error => None,
})
.collect::<Vec<_>>(),
arguments: {
let mut new_generic_types = new_generic_types.clone();
args
.iter()
.enumerate()
.filter_map(|(_idx, (arg, span))| match arg {
FunctionArgument::Generic((is_ref, _), (name, _)) => {
Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: DataType::Generic(
new_generic_types.remove(0),
),
is_optional: false,
is_ref: *is_ref,
},
*span,
))
}
FunctionArgument::Typed(
(is_ref, _),
(name, _),
(ty, _),
) => Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: ty.clone(),
is_optional: false,
is_ref: *is_ref,
},
*span,
)),
FunctionArgument::Optional(
(is_ref, _),
(name, _),
ty,
_,
) => Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: match ty {
Some((ty, _)) => ty.clone(),
None => {
DataType::Generic(new_generic_types.remove(0))
}
},
is_optional: true,
is_ref: *is_ref,
},
*span,
)),
FunctionArgument::Error => None,
})
.collect::<Vec<_>>()
},

Copilot uses AI. Check for mistakes.
is_public: *is_pub,
compiler_flags: compiler_flags
.iter()
.map(|(flag, _)| flag.clone())
.collect(),
docs: match contexts.clone().last() {
Some(Context::DocString(doc)) => {
contexts.pop();
Some(doc.clone())
}
_ => None,
},
}),
data_type: data_type.clone(),
is_definition: true,
undefined: false,
span: *name_span,
contexts: vec![],
},
(file_id, file_version),
0..=usize::MAX,
Copy link

Copilot AI Nov 22, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Using 0..=usize::MAX as the definition scope allows the function to be referenced from anywhere in the file, including before its definition. This enables recursion but also permits forward references that may be unintended.

Consider using name_span.start..=usize::MAX instead, which would make the function visible from the point of its declaration forward (including within its own body for recursion) while preventing forward references from earlier in the file. This would be more consistent with typical scoping rules.

Suggested change
0..=usize::MAX,
name_span.start..=usize::MAX,

Copilot uses AI. Check for mistakes.
*is_pub,
);
}

body.iter().for_each(|stmnt| {
let StmntAnalysisResult {
return_ty,
Expand Down Expand Up @@ -225,109 +324,31 @@ pub async fn analyze_global_stmnt(
inferred_return_type = DataType::Failable(Box::new(inferred_return_type));
}

let data_type = match declared_return_ty {
Some((ty, ty_span)) => {
if !matches_type(ty, &inferred_return_type, &backend.files.generic_types) {
backend.files.report_error(
&(file_id, file_version),
&format!(
"Function returns type {inferred_return_type:?}, but expected {ty:?}",
),
*ty_span,
);
}

if is_propagating && !matches!(ty, DataType::Failable(_)) {
backend.files.report_error(
&(file_id, file_version),
"Function is propagating an error, but return type is not failable",
*ty_span,
);
}

ty.clone()
if let Some((ty, ty_span)) = declared_return_ty {
if !matches_type(ty, &inferred_return_type, &backend.files.generic_types) {
backend.files.report_error(
&(file_id, file_version),
&format!(
"Function returns type {inferred_return_type:?}, but expected {ty:?}",
),
*ty_span,
);
}
None => inferred_return_type,
};

let mut symbol_table = backend
.files
.symbol_table
.entry((file_id, file_version))
.or_default();

insert_symbol_definition(
&mut symbol_table,
&SymbolInfo {
name: name.to_string(),
symbol_type: SymbolType::Function(FunctionSymbol {
arguments: args
.iter()
.filter_map(|(arg, span)| match arg {
FunctionArgument::Generic((is_ref, _), (name, _)) => Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: DataType::Generic(
new_generic_types.remove(0),
),
is_optional: false,
is_ref: *is_ref,
},
*span,
)),
FunctionArgument::Typed((is_ref, _), (name, _), (ty, _)) => {
Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: ty.clone(),
is_optional: false,
is_ref: *is_ref,
},
*span,
))
}
FunctionArgument::Optional((is_ref, _), (name, _), ty, _) => {
Some((
analysis::FunctionArgument {
name: name.clone(),
data_type: match ty {
Some((ty, _)) => ty.clone(),
None => DataType::Generic(
new_generic_types.remove(0),
),
},
is_optional: true,
is_ref: *is_ref,
},
*span,
))
}
FunctionArgument::Error => None,
})
.collect::<Vec<_>>(),
is_public: *is_pub,
compiler_flags: compiler_flags
.iter()
.map(|(flag, _)| flag.clone())
.collect(),
docs: match contexts.clone().last() {
Some(Context::DocString(doc)) => {
contexts.pop();
Some(doc.clone())
}
_ => None,
},
}),
data_type: data_type.clone(),
is_definition: true,
undefined: false,
span: *name_span,
contexts: vec![],
},
(file_id, file_version),
span.end..=usize::MAX,
*is_pub,
);
if is_propagating && !matches!(ty, DataType::Failable(_)) {
backend.files.report_error(
&(file_id, file_version),
"Function is propagating an error, but return type is not failable",
*ty_span,
);
}
} else if let Some(generic_id) = new_generic_types.last() {
backend
.files
.generic_types
.constrain_generic_type(*generic_id, inferred_return_type);
backend.files.generic_types.mark_as_inferred(*generic_id);
}
}
GlobalStatement::Import(
(is_public_import, _),
Expand Down
9 changes: 5 additions & 4 deletions src/analysis/alpha050/stmnts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -952,9 +952,10 @@ pub fn analyze_stmnt(
contexts,
);

let has_failure_handler = failable_handlers
.iter()
.any(|(modifier, _)| matches!(modifier, FailableHandler::Failure(_)));
let has_failure_handler = failable_handlers.iter().any(|(modifier, _)| {
matches!(modifier, FailableHandler::Failure(_))
|| matches!(modifier, FailableHandler::Exited(_, _, _))
});

if !has_failure_handler
&& !modifiers.iter().any(|(modifier, _)| {
Expand Down Expand Up @@ -1169,7 +1170,7 @@ pub fn analyze_failable_handlers(
types.extend(return_ty);
is_propagating |= is_propagating_failure;
}
FailableHandler::Then(_, (code_var, code_var_span), block) => {
FailableHandler::Exited(_, (code_var, code_var_span), block) => {
let mut symbol_table = files
.symbol_table
.entry((file_id, file_version))
Expand Down
1 change: 1 addition & 0 deletions src/analysis/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -297,6 +297,7 @@ pub fn matches_type(expected: &DataType, given: &DataType, generics_map: &Generi
(DataType::Error, _) | (_, DataType::Error) => false,
(expected, DataType::Failable(given)) => matches_type(expected, given, generics_map),
(DataType::Failable(expected), given) => matches_type(expected, given, generics_map),
(DataType::Number, DataType::Int) => true,
(t1, t2) => *t1 == *t2,
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/grammar/alpha050/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ pub enum FailureHandler {
pub enum FailableHandler {
Failure(Spanned<FailureHandler>),
Succeeded(Spanned<String>, Box<Spanned<Block>>),
Then(Spanned<String>, Spanned<String>, Box<Spanned<Block>>),
Exited(Spanned<String>, Spanned<String>, Box<Spanned<Block>>),
}

#[derive(PartialEq, Debug, Clone)]
Expand Down
4 changes: 2 additions & 2 deletions src/grammar/alpha050/semantic_tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -924,10 +924,10 @@ fn semantic_tokens_from_failable_handlers(
block.1,
)]));
}
FailableHandler::Then((_, failed_span), (_, exit_code_span), block) => {
FailableHandler::Exited((_, exited_span), (_, exit_code_span), block) => {
tokens.push((
hash_semantic_token_type(SemanticTokenType::KEYWORD),
*failed_span,
*exited_span,
));

tokens.push((
Expand Down
4 changes: 2 additions & 2 deletions src/grammar/alpha050/statements/failable_handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ fn succeeded_parser<'a>(
fn then_parser<'a>(
stmnts: impl AmberParser<'a, Spanned<Statement>>,
) -> impl AmberParser<'a, Spanned<FailableHandler>> {
just(T!["then"])
just(T!["exited"])
.map_with(|t, e| (t.to_string(), e.span()))
.then(
just(T!['('])
Expand All @@ -88,7 +88,7 @@ fn then_parser<'a>(
)))
.map_with(|((succeeded_keyword, status_code), block), e| {
(
FailableHandler::Then(succeeded_keyword, status_code, Box::new(block)),
FailableHandler::Exited(succeeded_keyword, status_code, Box::new(block)),
e.span(),
)
})
Expand Down
Loading
Loading