Skip to content

Commit

Permalink
Radarr and Sonarr integration (#936)
Browse files Browse the repository at this point in the history
* build(backend): add radarr api client

* feat(backend): add new lot for integration

* feat(database): add migrations for new stuff

* feat(backend): adapt to new database schema

* chore(frontend): set default value

* feat(backend): add new source

* feat(frontend): do not allow editing readarr integration

* feat(frontend): do not ask for progress for push integrations

* feat(database): columns to store destination specifics

* feat(backend): allow adding destination specifics to integration

* feat(frontend): allow adding radarr integration

* feat(backend): sync integrations stuff

* chore(frontend): adapt to new gql schema

* refactor(backend): change name of function

* feat(backend): sync collection ids too

* feat(frontend): ask for sync collection ids as well

* fix(frontend): no show allowed

* feat(frontend): allow editing push integrations

* feat(database,backend,frontend): change name for preference to disable integrations

* feat(backend): stub functions to add stuff to radarr

* feat(backend): allow adding movies to radarr

* refactor(backend): move function out of integration

* refactor(backend): import with prefix

* refactor(backend): inline immediate return

* fix(frontend): make collections searchable

* feat(config): change name of config param

* docs: add instructions for radarr integration

* chore(frontend): hide controls for profile id

* feat(backend): select more columns

* feat(database): add column for tracking system information for collection_to_entity

* feat(backend): store cte extra information

* refactor(backend): send movies to radarr in a loop

* chore(backend): use unused result

* fix(backend): do not sync if already done

* chore(backend): always return true from function

* feat(backend): update collection_to_entity when radarr sync complete

* fix(backend): log when movie already synced

* feat(database): add new column for destination

* Revert "feat(database): add new column for destination"

This reverts commit 2b31cfd.

* feat(database): merge columns into one

* feat(backend): adapt to new database schema

* chore(frontend): adapt to new gql schema

* feat(backend): respect disable_integrations general preference

* feat(backend): store exactly which integration was synced

* chore(database): comment

* feat(database): account for incorrect finished shows calculations

* build(backend): add sonarr deps

* docs: add info about sonarr integration

* feat(backend): add new integration provider

* refactor(frontend): component to create dynamic arr inputs

* feat(frontend): allow creating radarr integration

* feat(backend): stub function for sonarr push

* feat(backend): store integrations synced to sonarr

* refactor(backend): use internal function for pushing data

* feat(backend): call function for sonarr

* feat(backend): send shows to sonarr

* fix(backend): send random title

* docs: remove extra text

* feat(database): add column to track external ids

* feat(backend): update external ids correctly

* refactor(backend): change name to match database schema

* feat(backend): fetch external identifiers for tmdb movies and shows

* fix(backend): use a better function for imports

* fix(backend): send tvdb id

* fix(backend): allow sending ids

* fix(backend): handle arr service push

* refactor(backend): extract into variable
  • Loading branch information
IgnisDa authored Jul 31, 2024
1 parent 90809a6 commit 66dff65
Show file tree
Hide file tree
Showing 28 changed files with 898 additions and 196 deletions.
261 changes: 248 additions & 13 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions apps/backend/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ mime_guess = "=2.0.5"
nanoid = { workspace = true }
openidconnect = "=3.5.0"
paginate = "=1.1.11"
radarr-api-rs = "=3.0.1"
rand = "=0.9.0-alpha.1"
regex = "=1.10.5"
# FIXME: Upgrade once https://github.com/seanmonstar/reqwest/pull/1620 is merged
Expand All @@ -76,6 +77,7 @@ serde_json = { workspace = true }
serde_with = { version = "=3.9.0", features = ["chrono_0_4"] }
serde-xml-rs = "=0.6.0"
slug = "=0.1.5"
sonarr-api-rs = "=3.0.0"
strum = { workspace = true }
struson = { version = "=0.5.0", features = ["serde"] }
tokio = { version = "=1.38.1", features = ["full"] }
Expand Down
23 changes: 17 additions & 6 deletions apps/backend/src/background.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,12 +41,17 @@ pub async fn background_jobs(
Ok(())
}

pub async fn yank_integrations_data(
pub async fn sync_integrations_data(
_information: ScheduledJob,
misc_service: Data<Arc<MiscellaneousService>>,
) -> Result<(), Error> {
tracing::trace!("Getting data from yanked integrations for all users");
misc_service.yank_integrations_data().await.unwrap();
tracing::trace!("Sending data for push integrations for all users");
misc_service
.send_data_for_push_integrations()
.await
.unwrap();
Ok(())
}

Expand All @@ -55,7 +60,7 @@ pub async fn yank_integrations_data(
// The background jobs which cannot be throttled.
#[derive(Debug, Deserialize, Serialize, Display)]
pub enum CoreApplicationJob {
YankIntegrationsData(String),
SyncIntegrationsData(String),
BulkProgressUpdate(String, Vec<ProgressUpdateInput>),
}

Expand All @@ -71,10 +76,16 @@ pub async fn perform_core_application_job(
tracing::trace!("Started job: {:#?}", name);
let start = Instant::now();
let status = match information {
CoreApplicationJob::YankIntegrationsData(user_id) => misc_service
.yank_integrations_data_for_user(&user_id)
.await
.is_ok(),
CoreApplicationJob::SyncIntegrationsData(user_id) => {
misc_service
.push_integrations_data_for_user(&user_id)
.await
.ok();
misc_service
.yank_integrations_data_for_user(&user_id)
.await
.is_ok()
}
CoreApplicationJob::BulkProgressUpdate(user_id, input) => misc_service
.bulk_progress_update(user_id, input)
.await
Expand Down
4 changes: 4 additions & 0 deletions apps/backend/src/entities/collection_to_entity.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

use crate::models::CollectionToEntitySystemInformation;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "collection_to_entity")]
pub struct Model {
Expand All @@ -21,6 +23,8 @@ pub struct Model {
pub exercise_id: Option<String>,
pub workout_id: Option<String>,
pub information: Option<serde_json::Value>,
#[sea_orm(column_type = "Json")]
pub system_information: CollectionToEntitySystemInformation,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
Expand Down
13 changes: 7 additions & 6 deletions apps/backend/src/entities/integration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,11 @@
use async_graphql::{InputObject, SimpleObject};
use async_trait::async_trait;
use database::{IntegrationLot, IntegrationSource};
use database::{IntegrationLot, IntegrationProvider};
use nanoid::nanoid;
use sea_orm::{entity::prelude::*, ActiveValue};

use crate::models::media::IntegrationSourceSpecifics;
use crate::models::media::IntegrationProviderSpecifics;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, SimpleObject, InputObject)]
#[sea_orm(table_name = "integration")]
Expand All @@ -15,19 +15,20 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
#[graphql(skip_input)]
pub id: String,
pub minimum_progress: Decimal,
pub maximum_progress: Decimal,
pub minimum_progress: Option<Decimal>,
pub maximum_progress: Option<Decimal>,
#[graphql(skip)]
pub user_id: String,
pub lot: IntegrationLot,
pub source: IntegrationSource,
pub provider: IntegrationProvider,
pub is_disabled: Option<bool>,
#[graphql(skip_input)]
pub created_on: DateTimeUtc,
#[graphql(skip_input)]
pub last_triggered_on: Option<DateTimeUtc>,
#[sea_orm(column_type = "Json")]
pub source_specifics: Option<IntegrationSourceSpecifics>,
#[graphql(skip)]
pub provider_specifics: Option<IntegrationProviderSpecifics>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
Expand Down
8 changes: 5 additions & 3 deletions apps/backend/src/entities/metadata.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,9 @@ use sea_orm::{entity::prelude::*, ActiveValue};
use serde::{Deserialize, Serialize};

use crate::models::media::{
AnimeSpecifics, AudioBookSpecifics, BookSpecifics, MangaSpecifics, MetadataFreeCreator,
MetadataImage, MetadataStateChanges, MetadataVideo, MovieSpecifics, PodcastSpecifics,
ShowSpecifics, VideoGameSpecifics, VisualNovelSpecifics, WatchProvider,
AnimeSpecifics, AudioBookSpecifics, BookSpecifics, ExternalIdentifiers, MangaSpecifics,
MetadataFreeCreator, MetadataImage, MetadataStateChanges, MetadataVideo, MovieSpecifics,
PodcastSpecifics, ShowSpecifics, VideoGameSpecifics, VisualNovelSpecifics, WatchProvider,
};

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, Default)]
Expand Down Expand Up @@ -41,6 +41,8 @@ pub struct Model {
pub free_creators: Option<Vec<MetadataFreeCreator>>,
#[sea_orm(column_type = "Json")]
pub watch_providers: Option<Vec<WatchProvider>>,
#[sea_orm(column_type = "Json")]
pub external_identifiers: Option<ExternalIdentifiers>,
pub audio_book_specifics: Option<AudioBookSpecifics>,
pub book_specifics: Option<BookSpecifics>,
pub movie_specifics: Option<MovieSpecifics>,
Expand Down
73 changes: 73 additions & 0 deletions apps/backend/src/integrations.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,33 @@ use std::future::Future;
use anyhow::{anyhow, bail, Result};
use async_graphql::Result as GqlResult;
use database::{MediaLot, MediaSource};
use radarr_api_rs::{
apis::{
configuration::{ApiKey as RadarrApiKey, Configuration as RadarrConfiguration},
movie_api::api_v3_movie_post as radarr_api_v3_movie_post,
},
models::{AddMovieOptions as RadarrAddMovieOptions, MovieResource as RadarrMovieResource},
};
use regex::Regex;
use reqwest::header::{HeaderValue, AUTHORIZATION};
use rust_decimal::Decimal;
use rust_decimal_macros::dec;
use sea_orm::{ColumnTrait, Condition, DatabaseConnection, EntityTrait, QueryFilter};
use sea_query::{extension::postgres::PgExpr, Alias, Expr, Func};
use serde::{Deserialize, Serialize};
use sonarr_api_rs::{
apis::{
configuration::{ApiKey as SonarrApiKey, Configuration as SonarrConfiguration},
series_api::api_v3_series_post as sonarr_api_v3_series_post,
},
models::{AddSeriesOptions as SonarrAddSeriesOptions, SeriesResource as SonarrSeriesResource},
};

use crate::{
entities::{metadata, prelude::Metadata},
models::{audiobookshelf_models, media::CommitMediaInput},
providers::google_books::GoogleBooksService,
traits::TraceOk,
utils::{get_base_http_client, ilike_sql},
};

Expand Down Expand Up @@ -517,4 +532,62 @@ impl IntegrationService {
}
Ok((media_items, vec![]))
}

pub async fn radarr_push(
&self,
radarr_base_url: String,
radarr_api_key: String,
radarr_profile_id: i32,
radarr_root_folder_path: String,
tmdb_id: String,
) -> Result<()> {
let mut configuration = RadarrConfiguration::new();
configuration.base_path = radarr_base_url;
configuration.api_key = Some(RadarrApiKey {
key: radarr_api_key,
prefix: None,
});
let mut resource = RadarrMovieResource::new();
resource.tmdb_id = Some(tmdb_id.parse().unwrap());
resource.quality_profile_id = Some(radarr_profile_id);
resource.root_folder_path = Some(Some(radarr_root_folder_path.clone()));
resource.monitored = Some(true);
let mut options = RadarrAddMovieOptions::new();
options.search_for_movie = Some(true);
resource.add_options = Some(Box::new(options));
radarr_api_v3_movie_post(&configuration, Some(resource))
.await
.trace_ok();
Ok(())
}

pub async fn sonarr_push(
&self,
sonarr_base_url: String,
sonarr_api_key: String,
sonarr_profile_id: i32,
sonarr_root_folder_path: String,
tvdb_id: String,
) -> Result<()> {
let mut configuration = SonarrConfiguration::new();
configuration.base_path = sonarr_base_url;
configuration.api_key = Some(SonarrApiKey {
key: sonarr_api_key,
prefix: None,
});
let mut resource = SonarrSeriesResource::new();
resource.title = Some(Some(tvdb_id.clone()));
resource.tvdb_id = Some(tvdb_id.parse().unwrap());
resource.quality_profile_id = Some(sonarr_profile_id);
resource.root_folder_path = Some(Some(sonarr_root_folder_path.clone()));
resource.monitored = Some(true);
resource.season_folder = Some(true);
let mut options = SonarrAddSeriesOptions::new();
options.search_for_missing_episodes = Some(true);
resource.add_options = Some(Box::new(options));
sonarr_api_v3_series_post(&configuration, Some(resource))
.await
.trace_ok();
Ok(())
}
}
10 changes: 5 additions & 5 deletions apps/backend/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ use utils::{COMPILATION_TIMESTAMP, TEMP_DIR};
use crate::{
background::{
background_jobs, perform_application_job, perform_core_application_job,
yank_integrations_data,
sync_integrations_data,
},
entities::prelude::Exercise,
graphql::get_schema,
Expand Down Expand Up @@ -102,7 +102,7 @@ async fn main() -> Result<()> {
.map(|f| f.parse().unwrap())
.collect_vec();
let rate_limit_count = config.scheduler.rate_limit_num;
let pull_every_minutes = config.integration.pull_every_minutes;
let sync_every_minutes = config.integration.sync_every_minutes;
let max_file_size = config.server.max_file_size;
let disable_background_jobs = config.server.disable_background_jobs;

Expand Down Expand Up @@ -273,18 +273,18 @@ async fn main() -> Result<()> {
)
.register_with_count(
1,
WorkerBuilder::new("yank_integrations_data")
WorkerBuilder::new("sync_integrations_data")
.stream(
CronStream::new_with_timezone(
Schedule::from_str(&format!("0 */{} * * * *", pull_every_minutes))
Schedule::from_str(&format!("0 */{} * * * *", sync_every_minutes))
.unwrap(),
tz,
)
.into_stream(),
)
.layer(ApalisTraceLayer::new())
.data(media_service_3.clone())
.build_fn(yank_integrations_data),
.build_fn(sync_integrations_data),
)
// application jobs
.register_with_count(
Expand Down
Loading

0 comments on commit 66dff65

Please sign in to comment.