Updated load_dataset to be resistant to bad columns
SilasMarvin committed Jun 6, 2024
commit 600bbfe8541e68455479fbb0347f22b5840e3193
pgml-extension/src/bindings/transformers/mod.rs: 49 changes (30 additions, 19 deletions)
@@ -434,54 +434,65 @@ pub fn load_dataset(
     Spi::run(&format!(r#"CREATE TABLE {table_name} ({column_types})"#))?;
     let insert = format!(r#"INSERT INTO {table_name} ({column_names}) VALUES ({column_placeholders})"#);
     for i in 0..num_rows {
+        let mut skip = false;
         let mut row = Vec::with_capacity(num_cols);
         for (name, values) in data {
             let value = values
                 .as_array()
                 .ok_or_else(|| anyhow!("expected {values} to be an array"))?
                 .get(i)
                 .ok_or_else(|| anyhow!("invalid index {i} for {values}"))?;
-            match types
+            let (ty, datum) = match types
                 .get(name)
                 .ok_or_else(|| anyhow!("{types:?} expected to have key {name}"))?
                 .as_str()
                 .ok_or_else(|| anyhow!("json field {name} expected to be string"))?
             {
-                "string" => row.push((
+                "string" => (
                     PgBuiltInOids::TEXTOID.oid(),
                     value
                         .as_str()
-                        .ok_or_else(|| anyhow!("expected {value} to be string"))?
-                        .into_datum(),
-                )),
-                "dict" | "list" => row.push((PgBuiltInOids::JSONBOID.oid(), JsonB(value.clone()).into_datum())),
-                "int64" | "int32" | "int16" => row.push((
+                        .map(IntoDatum::into_datum)
+                        .ok_or_else(|| anyhow!("expected column {name} with {value} to be string")),
+                ),
+                "dict" | "list" => (PgBuiltInOids::JSONBOID.oid(), Ok(JsonB(value.clone()).into_datum())),
+                "int64" | "int32" | "int16" => (
                     PgBuiltInOids::INT8OID.oid(),
                     value
                         .as_i64()
-                        .ok_or_else(|| anyhow!("expected {value} to be i64"))?
-                        .into_datum(),
-                )),
-                "float64" | "float32" | "float16" => row.push((
+                        .map(IntoDatum::into_datum)
+                        .ok_or_else(|| anyhow!("expected column {name} with {value} to be i64")),
+                ),
+                "float64" | "float32" | "float16" => (
                     PgBuiltInOids::FLOAT8OID.oid(),
                     value
                         .as_f64()
-                        .ok_or_else(|| anyhow!("expected {value} to be f64"))?
-                        .into_datum(),
-                )),
-                "bool" => row.push((
+                        .map(IntoDatum::into_datum)
+                        .ok_or_else(|| anyhow!("expected column {name} with {value} to be f64")),
+                ),
+                "bool" => (
                     PgBuiltInOids::BOOLOID.oid(),
                     value
                         .as_bool()
-                        .ok_or_else(|| anyhow!("expected {value} to be bool"))?
-                        .into_datum(),
-                )),
+                        .map(IntoDatum::into_datum)
+                        .ok_or_else(|| anyhow!("expected column {name} with {value} to be bool")),
+                ),
                 type_ => {
                     bail!("unhandled dataset value type while reading dataset: {value:?} {type_:?}")
                 }
             };
+            match datum {
+                Ok(datum) => row.push((ty, datum)),
+                Err(e) => {
+                    warning!("failed to convert dataset value to datum while reading dataset: {e}");
+                    skip = true;
+                    break;
+                }
+            }
         }
-        Spi::run_with_args(&insert, Some(row))?
+        if !skip {
+            Spi::run_with_args(&insert, Some(row))?
+        }
     }
 
     Ok(num_rows)
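The change above turns each per-column conversion into a Result and, on failure, emits a warning and skips that row instead of aborting the whole load_dataset call. Below is a minimal, standalone sketch of the same skip-on-bad-value pattern, assuming plain Rust plus the serde_json crate rather than the pgrx APIs used in the extension; convert, Row, and the sample data are hypothetical stand-ins for the real datum conversion and SPI insert.

use serde_json::{json, Value};

// Hypothetical stand-in for the (oid, datum) pairs the extension builds per row.
type Row = Vec<(&'static str, String)>;

// Convert one JSON value according to its declared column type, returning Err
// instead of propagating with `?`, so the caller can decide to skip the row.
fn convert(name: &str, ty: &str, value: &Value) -> Result<String, String> {
    match ty {
        "string" => value
            .as_str()
            .map(|s| s.to_string())
            .ok_or_else(|| format!("expected column {name} with {value} to be string")),
        "int64" | "int32" | "int16" => value
            .as_i64()
            .map(|i| i.to_string())
            .ok_or_else(|| format!("expected column {name} with {value} to be i64")),
        "float64" | "float32" | "float16" => value
            .as_f64()
            .map(|f| f.to_string())
            .ok_or_else(|| format!("expected column {name} with {value} to be f64")),
        "bool" => value
            .as_bool()
            .map(|b| b.to_string())
            .ok_or_else(|| format!("expected column {name} with {value} to be bool")),
        other => Err(format!("unhandled dataset value type: {other}")),
    }
}

fn main() {
    // Two sample rows; the second has a bad "age" value and should be skipped.
    let rows = vec![
        json!({"name": "a", "age": 1}),
        json!({"name": "b", "age": "not a number"}),
    ];
    let types = [("name", "string"), ("age", "int64")];

    let mut inserted = 0;
    for row_json in &rows {
        let mut skip = false;
        let mut row: Row = Vec::new();
        for (name, ty) in types {
            match convert(name, ty, &row_json[name]) {
                Ok(datum) => row.push((name, datum)),
                Err(e) => {
                    // The extension raises a Postgres warning!(); here we just log and move on.
                    eprintln!("failed to convert dataset value to datum: {e}");
                    skip = true;
                    break;
                }
            }
        }
        if !skip {
            // Stand-in for Spi::run_with_args(&insert, Some(row)).
            println!("inserted {row:?}");
            inserted += 1;
        }
    }
    println!("{inserted} of {} rows inserted", rows.len());
}

With this shape, a single malformed value costs only its own row; the rest of the dataset still loads, which mirrors the intent of the commit.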