Refactor tests II (#7)
* Refactor write image

* Refactor write image II

* Update gitignore

* Refactor query test

* Format code

* Add docker compose yaml

* Populate test data

* Fix clippy warnings

* Fix pipeline

* Fix pipeline II

* Fix pipeline III
quambene authored Apr 3, 2024
1 parent 688abc5 commit 9edddfb
Showing 8 changed files with 115 additions and 139 deletions.
15 changes: 13 additions & 2 deletions .github/workflows/rust-ci.yml
@@ -5,6 +5,11 @@ on:
pull_request:
env:
CARGO_TERM_COLOR: always
DB_HOST: 127.0.0.1
DB_PORT: 5432
DB_USER: pigeon
DB_PASSWORD: pigeon-pw
DB_NAME: pigeon-db
jobs:
check:
runs-on: ubuntu-latest
@@ -55,8 +60,12 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- name: Start container
run: docker compose up -d
- name: cargo test --test '*'
run: cargo test --test '*' --locked
- name: Stop container
run: docker compose down -v
os-test:
runs-on: ${{ matrix.os }}
name: os-test / ${{ matrix.os }}
@@ -69,8 +78,6 @@ jobs:
- uses: dtolnay/rust-toolchain@stable
- name: cargo test --lib
run: cargo test --lib --locked
- name: cargo test --test '*'
run: cargo test --test '*' --locked
doc-test:
runs-on: ubuntu-latest
steps:
@@ -90,8 +97,12 @@ jobs:
- name: cargo generate-lockfile
if: hashFiles('Cargo.lock') == ''
run: cargo generate-lockfile
- name: Start container
run: docker compose up -d
- name: cargo llvm-cov
run: cargo llvm-cov --locked --all-features --lcov --output-path lcov.info
- name: Stop container
run: docker compose down -v
- name: Upload to codecov.io
uses: codecov/codecov-action@v3
with:
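
The workflow exports the `DB_*` variables both to `docker compose`, which substitutes them into `docker-compose.yaml`, and to the test binaries, which read them from the environment at runtime (the crate's `ConnVars::from_env` in `src/cmd/query.rs` follows this pattern). A minimal sketch of such env-based Postgres configuration — the struct and field names here are illustrative assumptions, not the crate's actual `ConnVars` layout:

```rust
use std::env;

/// Illustrative stand-in for the crate's `ConnVars`; the field names are assumptions.
struct DbConfig {
    host: String,
    port: u16,
    user: String,
    password: String,
    name: String,
}

impl DbConfig {
    /// Read the same variables the CI workflow exports (DB_HOST, DB_PORT, ...).
    fn from_env() -> Result<Self, env::VarError> {
        Ok(Self {
            host: env::var("DB_HOST")?,
            port: env::var("DB_PORT")?.parse().expect("DB_PORT must be a number"),
            user: env::var("DB_USER")?,
            password: env::var("DB_PASSWORD")?,
            name: env::var("DB_NAME")?,
        })
    }

    /// Build a Postgres connection URL from the parts.
    fn connection_string(&self) -> String {
        format!(
            "postgres://{}:{}@{}:{}/{}",
            self.user, self.password, self.host, self.port, self.name
        )
    }
}
```

With the values exported above, `connection_string()` would yield `postgres://pigeon:pigeon-pw@127.0.0.1:5432/pigeon-db`.
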
18 changes: 6 additions & 12 deletions .gitignore
@@ -17,32 +17,26 @@ rust-toolchain
# Environment
**/.env

### Keys
# Keys
**/*.jks
**/*.p8
**/*.pem

### Config files
# Config files
**/*.yaml

### Releases
releases
# Allow docker-compose.yaml
!docker-compose.yaml

### Data
sent_emails
my-sent-emails
saved_queries
my-saved-queries
# Data
**/*.csv
**/*.jpg
**/*.png
**/*.pdf
**/*.odt

### Allow test data
# Allow test data
!test_data/*

### IDEs

# VS Code
**/.vscode
36 changes: 19 additions & 17 deletions README.md
@@ -445,23 +445,25 @@ Sendgrid | equals monthly limit

## Testing

Some integration tests require a locally running database, and an AWS SES account.
Specify the following environment variables:

- SMTP
- `SMTP_SERVER`
- `SMTP_USERNAME`
- `SMTP_PASSWORD`
- AWS SES
- `AWS_ACCESS_KEY_ID`
- `AWS_SECRET_ACCESS_KEY`
- `AWS_REGION`
- Postgres
- `DB_HOST`
- `DB_PORT`
- `DB_USER`
- `DB_PASSWORD`
- `DB_NAME`
Some integration tests require a locally running database, and an AWS SES
account:

1. Specify the following environment variables:
- SMTP
- `SMTP_SERVER`
- `SMTP_USERNAME`
- `SMTP_PASSWORD`
- AWS SES
- `AWS_ACCESS_KEY_ID`
- `AWS_SECRET_ACCESS_KEY`
- `AWS_REGION`
- Postgres
- `DB_HOST`
- `DB_PORT`
- `DB_USER`
- `DB_PASSWORD`
- `DB_NAME`
2. Set up a temporary postgres db: `docker-compose run --rm --service-ports postgres`

``` bash
# Run unit tests and integration tests
13 changes: 13 additions & 0 deletions docker-compose.yaml
@@ -0,0 +1,13 @@
version: '3'
services:
postgres:
volumes:
- ./test_data:/docker-entrypoint-initdb.d
image: postgres:16-alpine
restart: "no"
ports:
- "${DB_PORT}:5432"
environment:
POSTGRES_USER: "${DB_USER}"
POSTGRES_PASSWORD: "${DB_PASSWORD}"
POSTGRES_DB: "${DB_NAME}"
17 changes: 13 additions & 4 deletions src/cmd/query.rs
@@ -18,7 +18,6 @@ pub fn query(matches: &ArgMatches) -> Result<(), anyhow::Error> {
let now = Utc::now();
let conn_vars = ConnVars::from_env()?;
let ssh_tunnel = matches.value_of(arg::SSH_TUNNEL);

let connection = DbConnection::new(&conn_vars, ssh_tunnel)?;
let mut df_query = sources::query_postgres(&connection, query)?;

@@ -27,15 +26,25 @@
}

if matches.is_present(arg::SAVE) {
let save_dir = Path::new(arg::value(arg::SAVE_DIR, matches)?);

// If argument 'FILE_TYPE' is not present the default value 'csv' will be used
match matches.value_of(arg::FILE_TYPE) {
Some(file_type) => match file_type {
"csv" => {
let save_dir = Path::new(arg::value(arg::SAVE_DIR, matches)?);
sources::write_csv(&mut df_query, save_dir, now)?;
}
x if x == "jpg" => sources::write_image(matches, df_query, x)?,
x if x == "png" => sources::write_image(matches, df_query, x)?,
"jpg" | "png" => {
let image_column = arg::value(arg::IMAGE_COLUMN, matches)?;
let image_name = arg::value(arg::IMAGE_NAME, matches)?;
sources::write_image(
save_dir,
image_column,
image_name,
df_query,
file_type,
)?;
}
_ => {
return Err(anyhow!(
"Value '{}' not supported for argument '{}'",
76 changes: 23 additions & 53 deletions src/sources/image.rs
@@ -1,103 +1,73 @@
use crate::arg;
use anyhow::{anyhow, Context};
use clap::ArgMatches;
use anyhow::Context;
use polars::prelude::{DataFrame, DataType, TakeRandom};
use std::{
fs::{self, File},
io::Write,
path::PathBuf,
path::Path,
};
use uuid::Uuid;

pub fn write_image(
matches: &ArgMatches,
target_dir: &Path,
image_column: &str,
image_name: &str,
df: DataFrame,
file_type: &str,
) -> Result<(), anyhow::Error> {
let target_dir = match matches.value_of(arg::SAVE_DIR) {
Some(save_dir) => PathBuf::from(save_dir),
None => return Err(anyhow!("Missing value for argument '{}'", arg::SAVE_DIR)),
};

match target_dir.exists() {
true => (),
false => fs::create_dir(&target_dir).context(format!(
if !target_dir.exists() {
fs::create_dir(target_dir).context(format!(
"Can't create directory: '{}'",
target_dir.display()
))?,
))?;
}

let image_col = match matches.value_of(arg::IMAGE_COLUMN) {
Some(column_name) => column_name,
None => {
return Err(anyhow!(
"Missing value for argument '{}'",
arg::IMAGE_COLUMN
))
}
};
let name_col = match matches.value_of(arg::IMAGE_NAME) {
Some(column_name) => column_name,
None => return Err(anyhow!("Missing value for argument '{}'", arg::IMAGE_NAME)),
};

for i in 0..df.height() {
let data_type = df
.column(name_col)
.column(image_name)
.context("Can't find column for image name")?
.dtype();

let image_name = match data_type {
DataType::Utf8 => df
.column(name_col)
.column(image_name)
.context("Can't find column for image name")?
.utf8()
.context("Can't convert series to chunked array")?
.get(i)
.map(|str| str.to_string()),
DataType::Null => None,
_ => Some(
df.column(name_col)
df.column(image_name)
.context("Can't find column for image name")?
.get(i)?
.to_string(),
),
};

let image_name = match image_name {
Some(image_name) => image_name,
None => {
// Indicate the missing image name by a UUID
Uuid::new_v4().to_hyphenated().to_string()
}
};

let image_name = image_name.unwrap_or(Uuid::new_v4().to_hyphenated().to_string());
let target_file = image_name + "." + file_type;
let target_path = target_dir.join(target_file);

let image = df
.column(image_col)
.column(image_column)
.context("Can't find column for images")?
.list()
.context("Can't convert series to chunked array")?
.get(i);

println!("Save query result to file: {}", target_path.display());

match image {
Some(image) => {
let bytes: Vec<u8> = image
.u8()
.context("Can't convert series to chunked array")?
.into_iter()
.map(|byte| byte.expect("Can't convert series to bytes"))
.collect();
if let Some(image) = image {
let bytes = image
.u8()
.context("Can't convert series to chunked array")?
.into_iter()
.map(|byte| byte.expect("Can't convert series to bytes"))
.collect::<Vec<_>>();

let mut file = File::create(target_path).context("Unable to create file")?;
file.write_all(bytes.as_slice())
.context("Unable to write file.")?;
}
None => continue,
let mut file = File::create(target_path).context("Unable to create file")?;
file.write_all(bytes.as_slice())
.context("Unable to write file.")?;
}
}

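The refactored `write_image` receives the target directory and column names as plain arguments instead of re-parsing `ArgMatches`, and falls back to a UUID when the name column yields no value. A condensed sketch of that fallback-and-write step for a single image, using plain types instead of the polars-backed ones in the actual function:

```rust
use std::{fs, io::Write, path::Path};
use uuid::Uuid;

/// Write one image's bytes to `<target_dir>/<name>.<file_type>`, falling back
/// to a random UUID when no name is available.
fn write_one_image(
    target_dir: &Path,
    name: Option<String>,
    bytes: &[u8],
    file_type: &str,
) -> std::io::Result<()> {
    if !target_dir.exists() {
        fs::create_dir(target_dir)?;
    }

    // A missing image name is indicated by a UUID, as in the refactored function.
    let name = name.unwrap_or_else(|| Uuid::new_v4().to_string());
    let target_path = target_dir.join(format!("{name}.{file_type}"));

    let mut file = fs::File::create(target_path)?;
    file.write_all(bytes)
}
```

Using `unwrap_or_else` here defers the UUID allocation to the `None` case; the committed code reaches the same result with `unwrap_or`, which evaluates its argument eagerly.
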
10 changes: 10 additions & 0 deletions test_data/init.sql
@@ -0,0 +1,10 @@
CREATE TABLE account (
id serial primary key,
first_name character varying,
last_name character varying,
email character varying NOT NULL
);

COPY account(first_name, last_name, email)
FROM
'/docker-entrypoint-initdb.d/contacts.csv' DELIMITER ',' CSV HEADER;
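
With the init script seeding an `account` table from `contacts.csv`, an integration test can assert against the containerized database. A minimal sketch using the `postgres` crate directly — an assumption for illustration only, since the project routes queries through its own `DbConnection` and `sources::query_postgres`:

```rust
use postgres::{Client, NoTls};
use std::env;

#[test]
fn queries_seeded_accounts() -> Result<(), Box<dyn std::error::Error>> {
    // Connect with the same DB_* variables the CI workflow and docker-compose.yaml use.
    let conn_str = format!(
        "host={} port={} user={} password={} dbname={}",
        env::var("DB_HOST")?,
        env::var("DB_PORT")?,
        env::var("DB_USER")?,
        env::var("DB_PASSWORD")?,
        env::var("DB_NAME")?,
    );
    let mut client = Client::connect(&conn_str, NoTls)?;

    // The init script copies contacts.csv into `account`, so it should not be empty.
    let rows = client.query("SELECT email FROM account", &[])?;
    assert!(!rows.is_empty());

    Ok(())
}
```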