Skip to content

Commit

Permalink
revert
Browse files — browse the repository at this point in the history
  • Loading branch information
max-kammerer committed Dec 10, 2024
1 parent c136a88 commit caf95a5
Showing 1 changed file with 37 additions and 37 deletions.
74 changes: 37 additions & 37 deletions rust/cubesql/cubesql/src/sql/dataframe.rs
Original file line number Diff line number Diff line change
Expand Up @@ -404,7 +404,7 @@ pub fn arrow_to_column_type(arrow_type: DataType) -> Result<ColumnType, CubeErro
DataType::Utf8 | DataType::LargeUtf8 => Ok(ColumnType::String),
DataType::Date32 => Ok(ColumnType::Date(false)),
DataType::Date64 => Ok(ColumnType::Date(true)),
DataType::Timestamp(_, _) => Ok(ColumnType::Timestamp),
DataType::Timestamp(_, _) => Ok(ColumnType::String),
DataType::Interval(unit) => Ok(ColumnType::Interval(unit)),
DataType::Float16 | DataType::Float32 | DataType::Float64 => Ok(ColumnType::Double),
DataType::Boolean => Ok(ColumnType::Boolean),
Expand Down Expand Up @@ -826,7 +826,7 @@ mod tests {
(DataType::Date64, ColumnType::Date(true)),
(
DataType::Timestamp(TimeUnit::Second, None),
ColumnType::Timestamp,
ColumnType::String,
),
(
DataType::Interval(IntervalUnit::YearMonth),
Expand Down Expand Up @@ -895,41 +895,41 @@ mod tests {
Ok(())
}

#[test]
fn test_timestamp_conversion() -> Result<(), CubeError> {
    // Verifies that Arrow timestamp arrays of every supported resolution
    // (nanosecond, microsecond, millisecond) decode into
    // `TableValue::Timestamp` values normalized to nanoseconds since epoch.

    // Nanosecond input: value passes through unchanged.
    let nanos = vec![Some(1640995200000000000)];
    let expected_nanos = nanos
        .clone()
        .into_iter()
        .map(|v| TableValue::Timestamp(TimestampValue::new(v.unwrap(), None)))
        .collect::<Vec<_>>();
    create_record_batch(
        DataType::Timestamp(TimeUnit::Nanosecond, None),
        TimestampNanosecondArray::from(nanos),
        ColumnType::Timestamp,
        expected_nanos,
    )?;

    // Microsecond input: scaled up by 1_000 to reach nanoseconds.
    let micros = vec![Some(1640995200000000)];
    let expected_micros = micros
        .clone()
        .into_iter()
        .map(|v| TableValue::Timestamp(TimestampValue::new(v.unwrap() * 1_000, None)))
        .collect::<Vec<_>>();
    create_record_batch(
        DataType::Timestamp(TimeUnit::Microsecond, None),
        TimestampMicrosecondArray::from(micros),
        ColumnType::Timestamp,
        expected_micros,
    )?;

    // Millisecond input: scaled up by 1_000_000 to reach nanoseconds.
    let millis = vec![Some(1640995200000)];
    let expected_millis = millis
        .clone()
        .into_iter()
        .map(|v| TableValue::Timestamp(TimestampValue::new(v.unwrap() * 1_000_000, None)))
        .collect::<Vec<_>>();
    create_record_batch(
        DataType::Timestamp(TimeUnit::Millisecond, None),
        TimestampMillisecondArray::from(millis),
        ColumnType::Timestamp,
        expected_millis,
    )?;

    Ok(())
}
// #[test]
// fn test_timestamp_conversion() -> Result<(), CubeError> {
// let data_nano = vec![Some(1640995200000000000)];
// create_record_batch(
// DataType::Timestamp(TimeUnit::Nanosecond, None),
// TimestampNanosecondArray::from(data_nano.clone()),
// ColumnType::Timestamp,
// data_nano
// .into_iter()
// .map(|e| TableValue::Timestamp(TimestampValue::new(e.unwrap(), None)))
// .collect::<Vec<_>>(),
// )?;
//
// let data_micro = vec![Some(1640995200000000)];
// create_record_batch(
// DataType::Timestamp(TimeUnit::Microsecond, None),
// TimestampMicrosecondArray::from(data_micro.clone()),
// ColumnType::Timestamp,
// data_micro
// .into_iter()
// .map(|e| TableValue::Timestamp(TimestampValue::new(e.unwrap() * 1000, None)))
// .collect::<Vec<_>>(),
// )?;
//
// let data_milli = vec![Some(1640995200000)];
// create_record_batch(
// DataType::Timestamp(TimeUnit::Millisecond, None),
// TimestampMillisecondArray::from(data_milli.clone()),
// ColumnType::Timestamp,
// data_milli
// .into_iter()
// .map(|e| TableValue::Timestamp(TimestampValue::new(e.unwrap() * 1000000, None)))
// .collect::<Vec<_>>(),
// )
// }

#[test]
fn test_signed_conversion() -> Result<(), CubeError> {
Expand Down

0 comments on commit caf95a5

Please sign in to comment.