
Commit c1a39d4

feat: configured docker image

1 parent 78e0c56

19 files changed: +396 -184

.dockerignore (+1)

@@ -32,3 +32,4 @@
 **/values.dev.yaml
 LICENSE
 README.md
+database
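Adding `database` to .dockerignore keeps the SQLite data directory out of the image build context; the compose.yaml change below bind-mounts ./database into the container at runtime instead, so the data never gets baked into the image.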

Dockerfile (+1 -1)

@@ -37,7 +37,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \
     python -m pip install -r requirements.txt

 # Switch to the non-privileged user to run the application.
-USER appuser
+# USER appuser

 # Copy the source code into the container.
 COPY . .
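Commenting out USER appuser means the container now runs as root. A likely motive is the new bind mount: the host-owned ./database directory is generally not writable by the image's non-privileged user. A gentler alternative (a sketch only, not what this commit does; it assumes the appuser account from the standard docker init template) would be:

# Keep the non-privileged user but make the mount point writable for it.
RUN mkdir -p /app/database && chown appuser /app/database
USER appuser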

README.md (+3 -1)

@@ -16,4 +16,6 @@ FROM submission s
 INNER JOIN breakdown ON s.id = breakdown.id
 GROUP BY sub_month, sub_year
 ORDER BY sub_month, sub_year;
-```
+```
+
+uvicorn main:app --reload
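The appended `uvicorn main:app --reload` is the local development command: it assumes main.py exposes the FastAPI instance as `app`, serves on uvicorn's default http://127.0.0.1:8000, and restarts on code changes.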

analytics.py (+4 -32)

@@ -13,15 +13,17 @@
 from nltk.tokenize import word_tokenize
 from nrclex import NRCLex
 from sqlmodel import Session, create_engine, select
-from endpoints.submission_api import SubmissionAPI

+from endpoints.database_config import DatabaseConfig
+from endpoints.submission_api import SubmissionAPI
 from models.submission import Submission
 from models.summary import Summary


 class AnalyticsProcessor:
     def __init__(self):
-        self.api = SubmissionAPI()
+        engine = DatabaseConfig().get_engine()
+        self.api = SubmissionAPI(engine)

     def process(self, submissions):
         afinn = Afinn()

@@ -153,36 +155,6 @@ def generate_search(self):

         self.write_to_file(json.dumps(indexes), "search")

-    def generate_top(self):
-        indexes = []
-
-        sqlite_file_name = "AmItheAsshole.db"
-        sqlite_url = f"sqlite:///database//{sqlite_file_name}"
-        engine = create_engine(sqlite_url, echo=False)
-        with Session(engine) as session:
-            statement = select(Submission)
-            results = session.exec(statement)
-            for submission in results:
-                entry = dict()
-                entry["id"] = submission.id
-                entry["scores"] = submission.score
-                entry["created_utc"] = submission.created_utc
-
-                try:
-                    summary: Summary = self.api.read_summary(submission.id)
-                    entry["nta"] = summary.counts["nta_count"]
-                    entry["yta"] = summary.counts["yta_count"]
-                    entry["esh"] = summary.counts["esh_count"]
-                    entry["info"] = summary.counts["info_count"]
-                    entry["nah"] = summary.counts["nah_count"]
-                except Exception:
-                    continue
-                indexes.append(entry)
-
-        self.write_to_file(json.dumps(indexes), "top")
-
-        return indexes
-
     def write_to_file(self, json, file_name):
         f = open("./endpoints/static/" + str(file_name) + ".json", "w")
         f.write(json)
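Removing generate_top (along with its hard-coded AmItheAsshole.db engine) pairs with the deletion of endpoints/static/top.json further down; AnalyticsProcessor now gets its engine from the shared DatabaseConfig singleton instead of building one per method.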

compose.yaml (+4 -1)

@@ -13,6 +13,10 @@ services:
       context: .
     ports:
       - 8000:8000
+    env_file:
+      - .env
+    volumes:
+      - ./database/:/app/database

 # The commented out section below is an example of how to define a PostgreSQL
 # database that your application can use. `depends_on` tells Docker Compose to

@@ -46,4 +50,3 @@ services:
 # secrets:
 #   db-password:
 #     file: db/password.txt
-
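The env_file entry supplies the variables that the new DatabaseConfig reads (notably DATABASE_NAME), and the bind mount keeps the SQLite file on the host so data survives container rebuilds. A plausible .env, with the value assumed from the filename the removed code used to hard-code:

# .env (assumed contents; DATABASE_NAME is the only variable this commit reads)
DATABASE_NAME=AmItheAsshole.db

With both in place, docker compose up --build starts the API on port 8000 against the mounted database.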

crawler.py (+8 -3)

@@ -3,6 +3,7 @@
 import logging
 import os
 from endpoints.comment_api import CommentAPI
+from endpoints.database_config import DatabaseConfig
 from endpoints.submission_api import SubmissionAPI
 from models.comment import Comment
 from models.submission import Submission

@@ -32,10 +33,14 @@ def crawl(self) -> None:
             user_agent=self.agent,
         )

-        submission_api = SubmissionAPI()
-        comment_api = CommentAPI()
+        db_config = DatabaseConfig()

-        for submission in reddit.subreddit(self.subreddit_name).new(
+        engine = db_config.get_engine()
+
+        submission_api = SubmissionAPI(engine)
+        comment_api = CommentAPI(engine)
+
+        for submission in reddit.subreddit(self.subreddit_name).hot(
             limit=self.post_limit
         ):
             custom_submission: Submission = Submission()
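Besides taking the shared engine, the crawl switches from PRAW's .new() listing to .hot(), so it now walks the subreddit's trending posts instead of the newest submissions, still capped by post_limit.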

endpoints/breakdown_api.py (+4 -19)

@@ -1,32 +1,17 @@
-import logging
-import os
 from fastapi import APIRouter
+from sqlalchemy import Engine
+from sqlmodel import Session

-from dotenv import dotenv_values
-from sqlmodel import Session, SQLModel, create_engine
 from models.breakdown import Breakdown


 class BreakdownAPI:
-    def __init__(self):
-        logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
-
-        self._configure_database()
-        self._create_db_and_tables()
+    def __init__(self, engine: Engine):
+        self.engine = engine
         self.router = APIRouter()

         self._setup_breakdown_routes()

-    def _create_db_and_tables(self):
-        SQLModel.metadata.create_all(self.engine)
-
-    def _configure_database(self):
-        config = dotenv_values(".env")
-        self.sqlite_file_name = config.get("DATABASE_NAME")
-        self.sqlite_url = f"sqlite:///database//{self.sqlite_file_name}"
-
-        self.engine = create_engine(self.sqlite_url, echo=False)
-
     def _setup_breakdown_routes(self) -> None:
         ...
endpoints/comment_api.py (+5 -10)

@@ -1,20 +1,15 @@
-import logging
-import os
-from fastapi import APIRouter, HTTPException, Query
-
+from sqlalchemy import Engine
 from dotenv import dotenv_values
+from fastapi import APIRouter, HTTPException, Query
 from sqlmodel import Session, SQLModel, create_engine, select
+
 from models.comment import Comment


 class CommentAPI:
-    def __init__(self):
-        logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
-
-        self._configure_database()
-        self._create_db_and_tables()
+    def __init__(self, engine: Engine):
+        self.engine = engine
         self.router = APIRouter()
-
         self._setup_comment_routes()

     def _create_db_and_tables(self):
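With every API class now accepting an Engine, the wiring presumably happens once at application startup. A minimal sketch of what main.py could look like; that file is not part of this diff, so everything here beyond the imports shown above is an assumption:

# Hypothetical main.py: build the shared engine once, inject it into each
# API class, and mount the routers they expose.
from fastapi import FastAPI

from endpoints.comment_api import CommentAPI
from endpoints.database_config import DatabaseConfig
from endpoints.submission_api import SubmissionAPI

app = FastAPI()
engine = DatabaseConfig().get_engine()  # singleton: same engine everywhere

app.include_router(SubmissionAPI(engine).router)
app.include_router(CommentAPI(engine).router)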

endpoints/database_config.py (+35, new file)

@@ -0,0 +1,35 @@
+import logging
+import os
+
+
+from dotenv import load_dotenv, find_dotenv
+from sqlalchemy import Engine
+from sqlmodel import SQLModel, create_engine
+
+
+class DatabaseConfig:
+    _instance = None
+
+    def _setup_database(self):
+        load_dotenv(find_dotenv())
+        # config = dotenv_values(".env")
+        sqlite_file_name = os.environ.get("DATABASE_NAME")
+        sqlite_url = f"sqlite:///database//{sqlite_file_name}"
+
+        print(sqlite_url)
+
+        self.engine = create_engine(sqlite_url, echo=False)
+
+        SQLModel.metadata.create_all(self.engine)
+
+    def get_engine(self) -> Engine:
+        return self.engine
+
+    def __new__(cls):
+        if cls._instance is None:
+            cls._instance = super(DatabaseConfig, cls).__new__(cls)
+            logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
+
+            cls._instance._setup_database()
+
+        return cls._instance
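Since __new__ caches the first instance on the class, every DatabaseConfig() call returns the same object and therefore the same engine; this is what lets analytics.py, crawler.py, and the API classes share one connection pool. A quick check of that behavior:

from endpoints.database_config import DatabaseConfig

a = DatabaseConfig()
b = DatabaseConfig()
assert a is b                            # __new__ returned the cached instance
assert a.get_engine() is b.get_engine()  # one shared SQLAlchemy engine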

endpoints/openai_inference_api.py (+5 -19)

@@ -1,33 +1,19 @@
 import logging
-import os
 from fastapi import APIRouter, HTTPException

-from dotenv import dotenv_values
-from sqlmodel import Session, SQLModel, create_engine
+from sqlalchemy import Engine
+from sqlmodel import Session

 from models.openai_analytics import OpenAIAnalysis


 class OpenAIInferenceAPI:
-    def __init__(self):
-        logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
-
-        self._configure_database()
-        self._create_db_and_tables()
+    def __init__(self, engine: Engine):
+        self.engine = engine
         self.router = APIRouter()

         self._setup_openai_analysis_routes()

-    def _create_db_and_tables(self):
-        SQLModel.metadata.create_all(self.engine)
-
-    def _configure_database(self):
-        config = dotenv_values(".env")
-        self.sqlite_file_name = config.get("DATABASE_NAME")
-        self.sqlite_url = f"sqlite:///database//{self.sqlite_file_name}"
-
-        self.engine = create_engine(self.sqlite_url, echo=False)
-
     def _setup_openai_analysis_routes(self) -> None:
         self.router.add_api_route(
             "/openai_analysis/{id}",

@@ -45,7 +31,7 @@ def create_opeai_analysis(self, open_ai_analysis: OpenAIAnalysis):
         session.refresh(open_ai_analysis)
         return open_ai_analysis

-    def read_openai_inference(self, id: int):
+    def read_openai_inference(self, id: int) -> OpenAIAnalysis:
         with Session(self.engine) as session:
             open_ai_inference = session.get(OpenAIAnalysis, id)
             if not open_ai_inference:

endpoints/static/top.json (-1)

This file was deleted.

endpoints/static_api.py (+2 -12)

@@ -3,24 +3,14 @@
 import os

 from fastapi import APIRouter
-from sqlmodel import SQLModel, create_engine


 class StaticAPI:
-    def create_db_and_tables(self):
-        SQLModel.metadata.create_all(self.engine)
-
-    def configure_database(self):
-        self.sqlite_file_name = "AmItheAsshole.db"
-        self.sqlite_url = f"sqlite:///database//{self.sqlite_file_name}"
-
-        self.engine = create_engine(self.sqlite_url, echo=False)
-
     def __init__(self):
         logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))

-        self.configure_database()
-        self.create_db_and_tables()
+        # self.configure_database()
+        # self.create_db_and_tables()
         self.router = APIRouter()

         self._setup_breakdown_routes()
