Add _catalog endpoints for listing repositories
This commit is contained in:
parent
16da8aa190
commit
82774a4931
|
@ -0,0 +1,66 @@
|
|||
use actix_web::{HttpResponse, web, get, HttpRequest};
|
||||
use qstring::QString;
|
||||
use serde::{Serialize, Deserialize};
|
||||
|
||||
use crate::{app_state::AppState, database::Database};
|
||||
|
||||
/// JSON response body for the `_catalog` endpoint: `{"repositories": [...]}`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RepositoryList {
    // Repository names in this registry (a single page when pagination is used).
    repositories: Vec<String>,
}
|
||||
#[get("")]
|
||||
pub async fn list_repositories(req: HttpRequest, state: web::Data<AppState>) -> HttpResponse {
|
||||
// Get limit and last tag from query params if present.
|
||||
let qs = QString::from(req.query_string());
|
||||
let limit = qs.get("n");
|
||||
let last_repo = qs.get("last");
|
||||
|
||||
let mut link_header = None;
|
||||
|
||||
// Paginate tag results if n was specified, else just pull everything.
|
||||
let database = &state.database;
|
||||
let repositories = match limit {
|
||||
Some(limit) => {
|
||||
let limit: u32 = limit.parse().unwrap();
|
||||
|
||||
// Convert the last param to a String, and list all the repos
|
||||
let last_repo = last_repo.and_then(|t| Some(t.to_string()));
|
||||
let repos = database.list_repositories(Some(limit), last_repo).await.unwrap();
|
||||
|
||||
// Get the new last repository for the response
|
||||
let last_repo = repos.last().and_then(|s| Some(s.clone()));
|
||||
|
||||
// Construct the link header
|
||||
let url = req.uri().to_string();
|
||||
let mut url = format!("<{}/v2/_catalog?n={}", url, limit);
|
||||
if let Some(last_repo) = last_repo {
|
||||
url += &format!("&limit={}", last_repo);
|
||||
}
|
||||
url += ">; rel=\"next\"";
|
||||
link_header = Some(url);
|
||||
|
||||
repos
|
||||
},
|
||||
None => {
|
||||
database.list_repositories(None, None).await.unwrap()
|
||||
}
|
||||
};
|
||||
|
||||
// Convert the `Vec<Tag>` to a `TagList` which will be serialized to json.
|
||||
let repo_list = RepositoryList {
|
||||
repositories,
|
||||
};
|
||||
let response_body = serde_json::to_string(&repo_list).unwrap();
|
||||
|
||||
// Construct the response, optionally adding the Link header if it was constructed.
|
||||
let mut resp = HttpResponse::Ok();
|
||||
resp.append_header(("Content-Type", "application/json"));
|
||||
|
||||
if let Some(link_header) = link_header {
|
||||
resp.append_header(("Link", link_header));
|
||||
}
|
||||
|
||||
resp.body(response_body)
|
||||
}
|
|
@ -2,6 +2,7 @@ pub mod blobs;
|
|||
pub mod uploads;
|
||||
pub mod manifests;
|
||||
pub mod tags;
|
||||
pub mod catalog;
|
||||
|
||||
use actix_web::{HttpResponse, get};
|
||||
|
||||
|
|
|
@ -28,20 +28,25 @@ pub async fn list_tags(path: web::Path<(String, )>, req: HttpRequest, state: web
|
|||
Some(limit) => {
|
||||
let limit: u32 = limit.parse().unwrap();
|
||||
|
||||
// Convert the last param to a String, and list all the tags
|
||||
let last_tag = last_tag.and_then(|t| Some(t.to_string()));
|
||||
let tags = database.list_repository_tags_page(&name, limit, last_tag).await.unwrap();
|
||||
|
||||
// Get the new last repository for the response
|
||||
let last_tag = tags.last();
|
||||
|
||||
// Construct the link header
|
||||
let mut url = format!("/v2/{}/tags/list?n={}", name, limit);
|
||||
if let Some(last_tag) = last_tag.clone() {
|
||||
url += &format!("&limit={}", last_tag);
|
||||
let url = req.uri().to_string();
|
||||
let mut url = format!("<{}/v2/{}/tags/list?n={}", url, name, limit);
|
||||
if let Some(last_tag) = last_tag {
|
||||
url += &format!("&limit={}", last_tag.name);
|
||||
}
|
||||
url += ";rel=\"next\"";
|
||||
url += ">; rel=\"next\"";
|
||||
link_header = Some(url);
|
||||
|
||||
database.list_repository_tags_page(&name, limit, last_tag).await.unwrap()
|
||||
tags
|
||||
},
|
||||
None => {
|
||||
let database = &state.database;
|
||||
database.list_repository_tags(&name).await.unwrap()
|
||||
}
|
||||
};
|
||||
|
|
|
@ -54,8 +54,9 @@ pub trait Database {
|
|||
|
||||
/// Create a repository
|
||||
async fn save_repository(&self, repository: &str) -> sqlx::Result<()>;
|
||||
/// List all repositories
|
||||
async fn list_repositories(&self) -> sqlx::Result<Vec<String>>;
|
||||
/// List all repositories.
|
||||
/// If limit is not specified, a default limit of 1000 will be returned.
|
||||
async fn list_repositories(&self, limit: Option<u32>, last_repo: Option<String>) -> sqlx::Result<Vec<String>>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
|
@ -285,11 +286,27 @@ impl Database for Pool<Sqlite> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
async fn list_repositories(&self) -> sqlx::Result<Vec<String>> {
|
||||
let repos: Vec<(String, )> = sqlx::query_as("SELECT name FROM repositories")
|
||||
.fetch_all(self).await?;
|
||||
// Move out of repos
|
||||
let repos = repos.into_iter().map(|row| row.0).collect();
|
||||
//async fn list_repositories(&self) -> sqlx::Result<Vec<String>> {
|
||||
async fn list_repositories(&self, limit: Option<u32>, last_repo: Option<String>) -> sqlx::Result<Vec<String>> {
|
||||
let limit = limit.unwrap_or(1000); // set default limit
|
||||
|
||||
// Query differently depending on if `last_repo` was specified
|
||||
let rows: Vec<(String, )> = match last_repo {
|
||||
Some(last_repo) => {
|
||||
sqlx::query_as("SELECT name FROM repositories WHERE name > ? ORDER BY name LIMIT ?")
|
||||
.bind(last_repo)
|
||||
.bind(limit)
|
||||
.fetch_all(self).await?
|
||||
},
|
||||
None => {
|
||||
sqlx::query_as("SELECT name FROM repositories ORDER BY name LIMIT ?")
|
||||
.bind(limit)
|
||||
.fetch_all(self).await?
|
||||
}
|
||||
};
|
||||
|
||||
// "unwrap" the tuple from the rows
|
||||
let repos: Vec<String> = rows.into_iter().map(|row| row.0).collect();
|
||||
|
||||
Ok(repos)
|
||||
}
|
||||
|
|
|
@ -40,6 +40,10 @@ async fn main() -> std::io::Result<()> {
|
|||
.service(
|
||||
web::scope("/v2")
|
||||
.service(api::version_check)
|
||||
.service(
|
||||
web::scope("/_catalog")
|
||||
.service(api::catalog::list_repositories)
|
||||
)
|
||||
.service(
|
||||
web::scope("/{name}")
|
||||
.service(
|
||||
|
|
Loading…
Reference in New Issue