Skip to content

Commit

Permalink
Update clippy to run for all targets and with python-bindings feature
Browse files Browse the repository at this point in the history
  • Loading branch information
umut-sahin authored and brandonwillard committed Oct 10, 2024
1 parent ba10c61 commit 9ee152f
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 7 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,6 @@ repos:
description: Check files with cargo clippy
entry: cargo clippy
language: system
-        args: ["--", "-D", "warnings"]
+        args: ["--all-targets", "--features", "python-bindings", "--", "-D", "warnings"]
types: [rust]
pass_filenames: false
10 changes: 4 additions & 6 deletions src/python_bindings/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ pub fn build_regex_from_schema_py(
#[pyfunction(name = "to_regex")]
#[pyo3(signature = (json, whitespace_pattern=None))]
pub fn to_regex_py(json: Bound<PyDict>, whitespace_pattern: Option<&str>) -> PyResult<String> {
-    let json_value: Value = serde_pyobject::from_pyobject(json).unwrap();
+    let json_value: Value = serde_pyobject::from_pyobject(json)?;
json_schema::to_regex(&json_value, whitespace_pattern, &json_value)
.map_err(|e| PyValueError::new_err(e.to_string()))
}
Expand Down Expand Up @@ -173,13 +173,11 @@ pub fn create_fsm_index_end_to_end_py<'py>(

for (token_id, end_state) in token_ids_end_states {
if let Ok(Some(existing_dict)) = states_to_token_subsets.get_item(start_state) {
-            existing_dict.set_item(token_id, end_state).unwrap();
+            existing_dict.set_item(token_id, end_state)?;
} else {
let new_dict = PyDict::new_bound(py);
-            new_dict.set_item(token_id, end_state).unwrap();
-            states_to_token_subsets
-                .set_item(start_state, new_dict)
-                .unwrap();
+            new_dict.set_item(token_id, end_state)?;
+            states_to_token_subsets.set_item(start_state, new_dict)?;
}

if !seen.contains(&end_state) {
Expand Down

0 comments on commit 9ee152f

Please sign in to comment.