
Commit

fix: removed sqlx prepare dependency
yellowHatpro committed Sep 10, 2024
1 parent 34d269c commit 8693088
Showing 9 changed files with 49 additions and 117 deletions.
5 changes: 3 additions & 2 deletions .env.example
@@ -1,2 +1,3 @@
-# Rename this file to .env for development
-RUN_MODE=development
+# Rename this file to .env for development, DATABASE_URL will be needed by sqlx to run cargo test
+RUN_MODE=development
+DATABASE_URL=postgres://musicbrainz:musicbrainz@localhost:5432/musicbrainz_db
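
As a rough sketch of why the new `DATABASE_URL` entry matters for `cargo test` now that queries are checked only at runtime: a test that touches the database has to be able to connect. The test below is illustrative only and assumes tokio as the async runtime; it is not part of the commit.

```rust
use sqlx::PgPool;

// Illustrative only: a test that reaches the database configured in .env.
// Assumes DATABASE_URL is exported (or loaded from .env by the test harness).
#[tokio::test]
async fn can_reach_musicbrainz_db() -> Result<(), sqlx::Error> {
    let url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    let pool = PgPool::connect(&url).await?;

    // A trivial round trip; with runtime-checked sqlx this is the earliest point
    // at which a broken connection string would surface.
    let one: i32 = sqlx::query_scalar("SELECT 1").fetch_one(&pool).await?;
    assert_eq!(one, 1);
    Ok(())
}
```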

This file was deleted.

This file was deleted.

This file was deleted.

1 change: 0 additions & 1 deletion docker/Dockerfile.dev
@@ -14,7 +14,6 @@ WORKDIR /app
COPY . .

ENV RUSTFLAGS='-C target-feature=+crt-static'
-ENV SQLX_OFFLINE=true

# Build the project in debug mode and place the binary in /app
RUN --mount=type=cache,target=/usr/local/cargo/registry \
24 changes: 10 additions & 14 deletions docker/Dockerfile.prod
@@ -1,22 +1,18 @@
-FROM rust:latest AS builder
+FROM metabrainz/base-image:latest

WORKDIR /app

-COPY . .
+RUN apt-get update && \
+    apt-get install --no-install-suggests --no-install-recommends -y \
+    rustc cargo build-essential unzip curl wget iputils-ping pkg-config libssl-dev \
+    && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

ENV RUSTFLAGS='-C target-feature=+crt-static'
-ENV SQLX_OFFLINE=true

-RUN cargo build --release --target x86_64-unknown-linux-gnu && \
-    cp ./target/x86_64-unknown-linux-gnu/release/mb-ia /mb-ia
-FROM scratch
+ENV RUN_MODE=production

-WORKDIR /app
-COPY --from=builder /mb-ia ./app
-
-COPY --from=builder /app/config /app/config
-
-ENV RUN_MODE=production
+COPY . .

-CMD ["/app/app"]
+RUN cargo build --release --target x86_64-unknown-linux-gnu && \
+    cp ./target/x86_64-unknown-linux-gnu/release/mb-ia /app/mb-ia
+
+CMD ["/app/mb-ia"]
8 changes: 2 additions & 6 deletions docs/INSTALL.md
@@ -6,7 +6,7 @@
> - Rename `config/default.example.toml` to `config/default.toml`, `config/development.example.toml` to `config/development.toml` and `config/production.example.toml` to `config/production.toml`.
> - Ensure [yq](https://github.com/mikefarah/yq) is installed, for using configs in the sql scripts.
> - After ensuring musicbrainz_db is running on port 5432, Run the script `init_db.sh` in scripts dir.
-> - In `config/development.toml` file, make sure to create a sentry rust project, enter your sentry project [DSN](https://docs.sentry.io/platforms/rust/#configure) (Data Source Name) in the `url` key's value. Make sure to not forget this step, else the program will panic.
+> - In `config/development.toml` file, create a sentry rust project, enter your sentry project [DSN](https://docs.sentry.io/platforms/rust/#configure) (Data Source Name) in the `url` key's value.
> - Get the Internet Archive API accesskey and secret from [here](https://archive.org/account/s3.php) (requires sign in). Paste them in `config/development.toml` file `[wayback_machine_api]`'s variables `myaccesskey` and `mysecret`.
There are 2 methods to run the program:
@@ -18,11 +18,7 @@ There are 2 methods to run the program:
```
2. Use the Dockerfile
- Note that the container has to run in the same network as musicbrainz db network bridge.
-1. ```shell
-cargo sqlx prepare
-```

-2. ```shell
+```shell
docker-compose -f docker/docker-compose.dev.yml up --build
```

3 changes: 0 additions & 3 deletions docs/MAINTENANCE.md
@@ -26,9 +26,6 @@ This doc provides instructions, guidelines and references to maintain the project
## Schema Guidelines

- Since the project depends on `musicbrainz_db`, therefore, make sure all the `CREATE TABLE musicbrainz.*` instructions, present in `scripts/sql` scripts are in sync with MusicBrainz database schema.
-- A few queries uses [compile-time semantic verifications](https://github.com/launchbadge/sqlx?tab=readme-ov-file#compile-time-verification). To make this work, we have `.sqlx` folder present which contains query metadata for such use case. These could change if the schema changes, so to keep things in sync:
-1. Make sure [sqlx-cli](https://github.com/launchbadge/sqlx/blob/main/sqlx-cli/README.md) is installed.
-2. Run `cargo sqlx prepare`.

## Monitoring
- While editing the grafana dashboard, make sure to update `grafana/dashboards/metrics-dashboard.json` file with the corresponding changes.
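
The schema-guidelines bullet removed above is what tied the repository to `cargo sqlx prepare`: the `sqlx::query!` macro is verified against the database (or the `.sqlx` metadata) at compile time, while the plain `sqlx::query` API used after this commit is only checked when the query runs. A minimal sketch of the two styles against this repository's `internet_archive_urls` insert; the helper name `insert_url` is illustrative, and the "before" form is left as a comment for comparison. The real changes in `src/cli/utils.rs` below apply the same pattern in place.

```rust
use sqlx::{PgPool, Row};

// Before: compile-time checked; needs a live DATABASE_URL at build time, or the
// `.sqlx` metadata generated by `cargo sqlx prepare` together with SQLX_OFFLINE=true.
//
// let id = sqlx::query!(
//     r#"
//     INSERT INTO external_url_archiver.internet_archive_urls (url, retry_count)
//     VALUES ($1, 0)
//     RETURNING id
//     "#,
//     url
// )
// .fetch_one(pool)
// .await?
// .id;

// After: checked only at runtime, so no prepare step or offline metadata is needed.
async fn insert_url(pool: &PgPool, url: &str) -> Result<i32, sqlx::Error> {
    let row = sqlx::query(
        r#"
        INSERT INTO external_url_archiver.internet_archive_urls (url, retry_count)
        VALUES ($1, 0)
        RETURNING id
        "#,
    )
    .bind(url)
    .fetch_one(pool)
    .await?;
    row.try_get("id")
}
```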
57 changes: 34 additions & 23 deletions src/cli/utils.rs
@@ -5,21 +5,28 @@ use crate::poller;
use crate::poller::utils::should_insert_url_to_internet_archive_urls;
use colorize::AnsiColor;
use mb_rs::schema::{EditData, EditNote};
-use sqlx::{Error, PgPool};
+use sqlx::{Error, PgPool, Row};

//TODO: Currently I am returning the internet_archive_urls row id when I insert any URL. Now there might be URLs which are already saved, hence instead of row id, show how many URLs are still there unprocessed, and is before the currently inserted one.
-pub async fn insert_url_to_internet_archive_urls(url: &str, pool: &PgPool) -> Result<i32, Error> {
-    sqlx::query!(
+pub async fn insert_url_to_internet_archive_urls(
+    url: &str,
+    pool: &PgPool,
+) -> Result<i32, sqlx::Error> {
+    let row = sqlx::query(
        r#"
        INSERT INTO external_url_archiver.internet_archive_urls (url, retry_count)
        VALUES ($1, 0)
        RETURNING id
-        "#,
-        url
+        "#,
    )
+    .bind(url)
    .fetch_one(pool)
-    .await
-    .map(|result| result.id)
+    .await?;

+    // Get the id from the returned row
+    let id: i32 = row.try_get("id")?;

+    Ok(id)
}

pub async fn check_before_inserting_url(url: &str, pool: &PgPool) -> Result<bool, Error> {
@@ -43,17 +50,19 @@ pub async fn insert_edit_data_row_to_internet_archive_urls(

    let urls = poller::utils::extract_url_from_edit_data(&edit_data_row, pool).await;
    for url in &urls {
-        let id = sqlx::query!(
-            r#"
+        let row = sqlx::query(
+            r#"
            INSERT INTO external_url_archiver.internet_archive_urls (url, from_table, from_table_id, retry_count)
            VALUES ($1, 'edit_data', $2, 0)
            RETURNING id
-            "#,
-            url,
-            edit_data_row.edit
-        ).fetch_one(pool)
-        .await?
-        .id;
+            "#
+        )
+        .bind(url)
+        .bind(edit_data_row.edit)
+        .fetch_one(pool)
+        .await?;
+        let id: i32 = row.try_get("id")?;

        println!("{} {} {}", "URL enqueued and id: ".green(), url, id);
    }
    Ok(!urls.is_empty())
@@ -76,17 +85,19 @@ pub async fn insert_edit_note_row_to_internet_archive_urls(

    let urls = poller::utils::extract_urls_from_text(&edit_note_row.text);
    for url in &urls {
-        let id = sqlx::query!(
-            r#"
+        let row = sqlx::query(
+            r#"
            INSERT INTO external_url_archiver.internet_archive_urls (url, from_table, from_table_id, retry_count)
            VALUES ($1, 'edit_note', $2, 0)
            RETURNING id
-            "#,
-            url,
-            edit_note_row.edit
-        ).fetch_one(pool)
-        .await?
-        .id;
+            "#
+        )
+        .bind(url)
+        .bind(edit_note_row.edit)
+        .fetch_one(pool)
+        .await?;

+        let id: i32 = row.try_get("id")?;
        println!("{} {} {}", "URL enqueued and id: ".green(), url, id);
    }
    Ok(!urls.is_empty())
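
As a usage note, a hedged sketch of how a caller might use the reworked helper. The connection string is the one from `.env.example` above; the caller name and the `crate::cli::utils` import path are assumptions based on the file location, not code from this commit.

```rust
use sqlx::PgPool;

// Hypothetical caller, not part of this commit: enqueue one URL via the helper
// changed above and print the internet_archive_urls row id it returns.
async fn enqueue_one_url() -> Result<(), sqlx::Error> {
    let pool =
        PgPool::connect("postgres://musicbrainz:musicbrainz@localhost:5432/musicbrainz_db").await?;

    // Assumed path: the function lives in src/cli/utils.rs.
    let id =
        crate::cli::utils::insert_url_to_internet_archive_urls("https://example.org/some-source", &pool)
            .await?;
    println!("URL enqueued with id {id}");
    Ok(())
}
```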
