feat(update): add aggregation pipeline updates and enforce immutable _id handling
This commit is contained in:
@@ -27,6 +27,9 @@ pub enum CommandError {
|
||||
#[error("duplicate key: {0}")]
|
||||
DuplicateKey(String),
|
||||
|
||||
#[error("immutable field: {0}")]
|
||||
ImmutableField(String),
|
||||
|
||||
#[error("internal error: {0}")]
|
||||
InternalError(String),
|
||||
}
|
||||
@@ -43,6 +46,7 @@ impl CommandError {
|
||||
CommandError::NamespaceNotFound(_) => (26, "NamespaceNotFound"),
|
||||
CommandError::NamespaceExists(_) => (48, "NamespaceExists"),
|
||||
CommandError::DuplicateKey(_) => (11000, "DuplicateKey"),
|
||||
CommandError::ImmutableField(_) => (66, "ImmutableField"),
|
||||
CommandError::InternalError(_) => (1, "InternalError"),
|
||||
};
|
||||
|
||||
|
||||
@@ -21,6 +21,11 @@ pub async fn handle(
|
||||
}
|
||||
}
|
||||
|
||||
/// How the `u` (in `update`) / `update` (in `findAndModify`) value of a
/// write command was expressed by the client.
enum TUpdateSpec {
    /// Classic update document: operator form (`$set`, …) or a full
    /// replacement document.
    Document(Document),
    /// Aggregation pipeline update: an ordered list of stage documents.
    Pipeline(Vec<Document>),
}
|
||||
|
||||
/// Handle the `update` command.
|
||||
async fn handle_update(
|
||||
cmd: &Document,
|
||||
@@ -78,21 +83,22 @@ async fn handle_update(
|
||||
};
|
||||
|
||||
let update = match update_spec.get("u") {
|
||||
Some(Bson::Document(d)) => d.clone(),
|
||||
Some(Bson::Array(_pipeline)) => {
|
||||
// Aggregation pipeline updates are not yet supported; treat as error.
|
||||
write_errors.push(doc! {
|
||||
"index": idx as i32,
|
||||
"code": 14_i32,
|
||||
"codeName": "TypeMismatch",
|
||||
"errmsg": "aggregation pipeline updates not yet supported",
|
||||
});
|
||||
if ordered {
|
||||
break;
|
||||
Some(update_value) => match parse_update_spec(update_value) {
|
||||
Ok(parsed) => parsed,
|
||||
Err(err) => {
|
||||
write_errors.push(doc! {
|
||||
"index": idx as i32,
|
||||
"code": 14_i32,
|
||||
"codeName": "TypeMismatch",
|
||||
"errmsg": err,
|
||||
});
|
||||
if ordered {
|
||||
break;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
_ => {
|
||||
},
|
||||
None => {
|
||||
write_errors.push(doc! {
|
||||
"index": idx as i32,
|
||||
"code": 14_i32,
|
||||
@@ -137,21 +143,12 @@ async fn handle_update(
|
||||
let new_doc = build_upsert_doc(&filter);
|
||||
|
||||
// Apply update operators or replacement.
|
||||
match UpdateEngine::apply_update(&new_doc, &update, array_filters.as_deref()) {
|
||||
match apply_update_spec(&new_doc, &update, array_filters.as_deref()) {
|
||||
Ok(mut updated) => {
|
||||
// Apply $setOnInsert if present.
|
||||
if let Some(Bson::Document(soi)) = update.get("$setOnInsert") {
|
||||
UpdateEngine::apply_set_on_insert(&mut updated, soi);
|
||||
}
|
||||
apply_set_on_insert_if_present(&update, &mut updated);
|
||||
|
||||
// Ensure _id exists.
|
||||
let new_id = if !updated.contains_key("_id") {
|
||||
let oid = ObjectId::new();
|
||||
updated.insert("_id", oid);
|
||||
Bson::ObjectId(oid)
|
||||
} else {
|
||||
updated.get("_id").unwrap().clone()
|
||||
};
|
||||
let new_id = ensure_document_id(&mut updated);
|
||||
|
||||
// Pre-check unique index constraints before upsert insert.
|
||||
if let Some(engine) = ctx.indexes.get(&ns_key) {
|
||||
@@ -229,12 +226,21 @@ async fn handle_update(
|
||||
};
|
||||
|
||||
for matched_doc in &docs_to_update {
|
||||
match UpdateEngine::apply_update(
|
||||
matched_doc,
|
||||
&update,
|
||||
array_filters.as_deref(),
|
||||
) {
|
||||
Ok(updated_doc) => {
|
||||
match apply_update_spec(matched_doc, &update, array_filters.as_deref()) {
|
||||
Ok(mut updated_doc) => {
|
||||
if let Err(e) = ensure_immutable_id(matched_doc, &mut updated_doc) {
|
||||
write_errors.push(doc! {
|
||||
"index": idx as i32,
|
||||
"code": 66_i32,
|
||||
"codeName": "ImmutableField",
|
||||
"errmsg": e.to_string(),
|
||||
});
|
||||
if ordered {
|
||||
break;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Pre-check unique index constraints before storage write.
|
||||
if let Some(engine) = ctx.indexes.get(&ns_key) {
|
||||
if let Err(e) = engine.check_unique_constraints_for_update(matched_doc, &updated_doc) {
|
||||
@@ -361,8 +367,11 @@ async fn handle_find_and_modify(
|
||||
};
|
||||
|
||||
let update_doc = match cmd.get("update") {
|
||||
Some(Bson::Document(d)) => Some(d.clone()),
|
||||
_ => None,
|
||||
Some(update_value) => Some(
|
||||
parse_update_spec(update_value)
|
||||
.map_err(CommandError::InvalidArgument)?
|
||||
),
|
||||
None => None,
|
||||
};
|
||||
|
||||
let remove = match cmd.get("remove") {
|
||||
@@ -477,12 +486,14 @@ async fn handle_find_and_modify(
|
||||
|
||||
if let Some(original_doc) = target {
|
||||
// Update the matched document.
|
||||
let updated_doc = UpdateEngine::apply_update(
|
||||
let mut updated_doc = apply_update_spec(
|
||||
&original_doc,
|
||||
&update,
|
||||
array_filters.as_deref(),
|
||||
)
|
||||
.map_err(|e| CommandError::InternalError(e.to_string()))?;
|
||||
.map_err(CommandError::InternalError)?;
|
||||
|
||||
ensure_immutable_id(&original_doc, &mut updated_doc)?;
|
||||
|
||||
// Pre-check unique index constraints before storage write.
|
||||
if let Some(engine) = ctx.indexes.get(&ns_key) {
|
||||
@@ -533,26 +544,17 @@ async fn handle_find_and_modify(
|
||||
// Upsert: create a new document.
|
||||
let new_doc = build_upsert_doc(&query);
|
||||
|
||||
let mut updated_doc = UpdateEngine::apply_update(
|
||||
let mut updated_doc = apply_update_spec(
|
||||
&new_doc,
|
||||
&update,
|
||||
array_filters.as_deref(),
|
||||
)
|
||||
.map_err(|e| CommandError::InternalError(e.to_string()))?;
|
||||
.map_err(CommandError::InternalError)?;
|
||||
|
||||
// Apply $setOnInsert if present.
|
||||
if let Some(Bson::Document(soi)) = update.get("$setOnInsert") {
|
||||
UpdateEngine::apply_set_on_insert(&mut updated_doc, soi);
|
||||
}
|
||||
apply_set_on_insert_if_present(&update, &mut updated_doc);
|
||||
|
||||
// Ensure _id.
|
||||
let upserted_id = if !updated_doc.contains_key("_id") {
|
||||
let oid = ObjectId::new();
|
||||
updated_doc.insert("_id", oid);
|
||||
Bson::ObjectId(oid)
|
||||
} else {
|
||||
updated_doc.get("_id").unwrap().clone()
|
||||
};
|
||||
let upserted_id = ensure_document_id(&mut updated_doc);
|
||||
|
||||
// Pre-check unique index constraints before upsert insert.
|
||||
if let Some(engine) = ctx.indexes.get(&ns_key) {
|
||||
@@ -667,6 +669,88 @@ fn build_upsert_doc(filter: &Document) -> Document {
|
||||
doc
|
||||
}
|
||||
|
||||
fn parse_update_spec(update_value: &Bson) -> Result<TUpdateSpec, String> {
|
||||
match update_value {
|
||||
Bson::Document(d) => Ok(TUpdateSpec::Document(d.clone())),
|
||||
Bson::Array(stages) => {
|
||||
if stages.is_empty() {
|
||||
return Err("aggregation pipeline update cannot be empty".into());
|
||||
}
|
||||
|
||||
let mut pipeline = Vec::with_capacity(stages.len());
|
||||
for stage in stages {
|
||||
match stage {
|
||||
Bson::Document(d) => pipeline.push(d.clone()),
|
||||
_ => {
|
||||
return Err(
|
||||
"aggregation pipeline update stages must be documents".into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(TUpdateSpec::Pipeline(pipeline))
|
||||
}
|
||||
_ => Err("missing or invalid 'u' field in update spec".into()),
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_update_spec(
|
||||
doc: &Document,
|
||||
update: &TUpdateSpec,
|
||||
array_filters: Option<&[Document]>,
|
||||
) -> Result<Document, String> {
|
||||
match update {
|
||||
TUpdateSpec::Document(update_doc) => UpdateEngine::apply_update(doc, update_doc, array_filters)
|
||||
.map_err(|e| e.to_string()),
|
||||
TUpdateSpec::Pipeline(pipeline) => {
|
||||
if array_filters.is_some_and(|filters| !filters.is_empty()) {
|
||||
return Err(
|
||||
"arrayFilters are not supported with aggregation pipeline updates"
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
UpdateEngine::apply_pipeline_update(doc, pipeline).map_err(|e| e.to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_set_on_insert_if_present(update: &TUpdateSpec, doc: &mut Document) {
|
||||
if let TUpdateSpec::Document(update_doc) = update {
|
||||
if let Some(Bson::Document(soi)) = update_doc.get("$setOnInsert") {
|
||||
UpdateEngine::apply_set_on_insert(doc, soi);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn ensure_document_id(doc: &mut Document) -> Bson {
|
||||
if let Some(id) = doc.get("_id") {
|
||||
id.clone()
|
||||
} else {
|
||||
let oid = ObjectId::new();
|
||||
doc.insert("_id", oid);
|
||||
Bson::ObjectId(oid)
|
||||
}
|
||||
}
|
||||
|
||||
fn ensure_immutable_id(original_doc: &Document, updated_doc: &mut Document) -> CommandResult<()> {
|
||||
if let Some(original_id) = original_doc.get("_id") {
|
||||
match updated_doc.get("_id") {
|
||||
Some(updated_id) if updated_id == original_id => Ok(()),
|
||||
Some(_) => Err(CommandError::ImmutableField(
|
||||
"cannot modify immutable field '_id'".into(),
|
||||
)),
|
||||
None => {
|
||||
updated_doc.insert("_id", original_id.clone());
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract _id as a string for storage operations.
|
||||
fn extract_id_string(doc: &Document) -> String {
|
||||
match doc.get("_id") {
|
||||
|
||||
Reference in New Issue
Block a user