Prevent search engine indexing via robots.txt (#162)
harryzcy authored Jan 18, 2024
1 parent aadc8db commit b3cd88a
Showing 1 changed file with 30 additions and 0 deletions.
src/router.rs (30 additions, 0 deletions)
@@ -40,6 +40,7 @@ pub fn router(data_path: String, artifact_path: String, db: database::Database)

Router::new()
.route("/", get(index_handler))
.route("/robots.txt", get(robots_handler))
.route("/ping", get(ping_handler))
.route("/repositories", get(list_repos_handler))
.route("/:server/:owner/:repo", get(list_commits_handler))
@@ -63,6 +64,10 @@ async fn index_handler() -> Html<&'static str> {
Html("<h1>Artifact Store</h1>")
}

async fn robots_handler() -> &'static str {
"User-agent: *\nDisallow: /"
}

async fn ping_handler() -> &'static str {
"pong"
}
@@ -211,6 +216,31 @@ mod tests {
std::fs::remove_dir_all("data/router/test_index_route").unwrap();
}

#[tokio::test]
async fn robots_route() {
let data_dir = String::from("data");
let artifact_path = String::from("data/artifacts");
let db = database::Database::new_rocksdb("data/router/test_robots_route").unwrap();
let app = router(data_dir, artifact_path, db);

let response = app
.oneshot(
Request::builder()
.uri("/robots.txt")
.body(Body::empty())
.unwrap(),
)
.await
.unwrap();

assert_eq!(response.status(), axum::http::StatusCode::OK);

let body = response.into_body().collect().await.unwrap().to_bytes();
assert_eq!(&body[..], b"User-agent: *\nDisallow: /");

std::fs::remove_dir_all("data/router/test_robots_route").unwrap();
}

#[tokio::test]
async fn ping_route() {
let data_dir = String::from("data");

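For context, here is a minimal standalone sketch of the route this commit adds, assuming axum 0.7 (and its axum::serve API) plus tokio; it is an illustration only, not the repository's code. The crate's real router in src/router.rs also takes the data path, artifact path, and a RocksDB-backed database handle, as the diff above shows.

use axum::{routing::get, Router};

// Same body the commit's robots_handler returns: disallow every crawler
// from every path.
async fn robots_handler() -> &'static str {
    "User-agent: *\nDisallow: /"
}

#[tokio::main]
async fn main() {
    // Only the route relevant to this sketch; the real router registers
    // many more (/, /ping, /repositories, ...).
    let app = Router::new().route("/robots.txt", get(robots_handler));

    // Bind address chosen arbitrarily for illustration.
    let listener = tokio::net::TcpListener::bind("127.0.0.1:3000")
        .await
        .unwrap();
    axum::serve(listener, app).await.unwrap();
}

With the sketch running, curl http://127.0.0.1:3000/robots.txt should return the two-line policy, mirroring the assertion in the robots_route test above.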