Skip to content

feat(ci): add docker build dry run #3

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Jan 27, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 52 additions & 0 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
name: Docker dry run

on:
  push:
  pull_request:
  workflow_dispatch:

  schedule:
    - cron: '0 0 * * *' # every day at 00:00

# A commit pushed to a PR branch fires both `push` and `pull_request`;
# cancel superseded runs for the same ref so we don't build twice.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      # Derive a short (7-char) image tag from the commit SHA and expose
      # it to later steps via step outputs.
      - name: Prepare
        id: prep
        run: |
          TAG=$(echo "$GITHUB_SHA" | head -c7)
          IMAGE="klickhouse-example-img"
          echo "tagged_image=${IMAGE}:${TAG}" >> "$GITHUB_OUTPUT"
          echo "tag=${TAG}" >> "$GITHUB_OUTPUT"

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v3
        with:
          install: true

      # Restore the BuildKit layer cache keyed by commit; fall back to the
      # most recent cache for this runner OS.
      - name: Cache Docker layers
        uses: actions/cache@v4
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      # Build only (push: false) — this is a dry run that validates the
      # Dockerfile compiles, without publishing an image.
      - name: Docker build
        uses: docker/build-push-action@v5
        with:
          builder: ${{ steps.buildx.outputs.name }}
          push: false
          context: .
          tags: ${{ steps.prep.outputs.tagged_image }}
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,mode=max,dest=/tmp/.buildx-cache-new

      # Swap the fresh cache in place of the old one. Workaround for the
      # local cache growing without bound across runs:
      # https://github.com/docker/build-push-action/issues/252
      - name: Move cache
        run: |
          rm -rf /tmp/.buildx-cache
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ RUN --mount=type=cache,target=/app/target \
FROM scratch

# Copy compiled application
COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/klickhouse_example /klickhouse_example
COPY --from=builder /app/klickhouse_example /klickhouse_example
# Copy application config
COPY ./confik.toml .

Expand Down
4 changes: 3 additions & 1 deletion aarch64.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,9 @@ RUN --mount=type=cache,target=/app/target \
FROM scratch

# copy compiled application
COPY --from=builder /app/target/aarch64-unknown-linux-musl/release/klickhouse_example /klickhouse_example
COPY --from=builder /app/klickhouse_example /klickhouse_example
# Copy application config
COPY ./confik.toml .

# specify that the application is started as PID 1
ENTRYPOINT ["/klickhouse_example"]
6 changes: 6 additions & 0 deletions src/web/app_state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,15 @@ impl AppState {
.await
.map_err(|e| eyre!("Failed to connect to Clickhouse: {}", e))?;

pool.check_pool().await?;

Ok(pool)
}

/// Verifies that the ClickHouse connection pool is reachable.
///
/// Thin delegation to `check_pool` on the stored pool; the `/health`
/// endpoint calls this to decide between a 200 and a 500 response.
///
/// # Errors
///
/// Propagates whatever error `check_pool` returns when the pool cannot
/// reach ClickHouse.
pub async fn check_clickhouse_connection(&self) -> Result<()> {
    self.clickhouse_pool.check_pool().await
}

/// Returns a shared reference to the application configuration.
pub fn config(&self) -> &AppConfig {
    &self.config
}
Expand Down
4 changes: 0 additions & 4 deletions src/web/global_panic_handler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,6 @@ pub fn setup_global_panic_handler(app_state: Data<AppState>) {
.map(ToString::to_string)
.unwrap_or_else(|| "Unknown panic".to_string());

println!("Global panic handler called #2");

tokio::task::spawn(async move {
// Create log
let log = WebServerLog {
Expand All @@ -32,8 +30,6 @@ pub fn setup_global_panic_handler(app_state: Data<AppState>) {
response_time: 0.0,
};

println!("Global panic handler called #3");

// Write log to Clickhouse
if let Err(e) = app_state.ch_logger().log(log).await {
eprintln!("Failed to log panic to ClickHouse: {:?}", e);
Expand Down
16 changes: 10 additions & 6 deletions src/web/handlers/mod.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,12 @@
use actix_web::{get, HttpResponse, Responder};
use crate::web::app_state::AppState;
use actix_web::{get, web, HttpResponse, Responder};
use tracing_actix_web::RequestId;

/// Root endpoint: echoes the tracing request id assigned to this request.
///
/// Useful for correlating a client-observed request with server-side
/// tracing output.
#[get("/")]
pub async fn index(request_id: RequestId) -> impl Responder {
    format!("request_id: {request_id}")
}

#[get("/health")]
pub async fn health() -> impl Responder {
"I'm alive!"
}

#[get("/fail")]
#[allow(clippy::unnecessary_literal_unwrap)]
pub async fn fail_endpoint() -> impl Responder {
Expand All @@ -19,3 +15,11 @@ pub async fn fail_endpoint() -> impl Responder {
let value = result.unwrap();
HttpResponse::Ok().body(format!("Value: {}", value))
}

#[get("/health")]
pub async fn health(app_state: web::Data<AppState>) -> impl Responder {
match app_state.check_clickhouse_connection().await {
Ok(_) => HttpResponse::Ok().body("I'm alive!"),
Err(e) => HttpResponse::InternalServerError().body(format!("Error: {}", e)),
}
}
7 changes: 4 additions & 3 deletions src/web/startup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,16 @@ use crate::web::global_panic_handler::setup_global_panic_handler;
use crate::web::handlers::{fail_endpoint, health, index};

pub async fn run_serve(config: AppConfig) -> Result<actix_web::dev::Server> {
let app_state = web::Data::new(AppState::build(config).await?);
setup_global_panic_handler(app_state.clone());
let app_state = AppState::build(config).await?;
let data_app_state = web::Data::new(app_state.clone());
setup_global_panic_handler(data_app_state.clone());

let port = app_state.config().http_port;
let addr = format!("0.0.0.0:{}", port);

let server = HttpServer::new(move || {
App::new()
.app_data(app_state.clone())
.app_data(data_app_state.clone())
.wrap(TracingLogger::default())
.wrap(NormalizePath::new(
actix_web::middleware::TrailingSlash::Trim,
Expand Down
Loading