Skip to content

Commit

Permalink
1055 improve the ux of the export process for new users [WIP] (#1093)
Browse files Browse the repository at this point in the history
  • Loading branch information
michalkrzem authored Jul 3, 2024
1 parent a531d89 commit 6ba9b45
Show file tree
Hide file tree
Showing 17 changed files with 425 additions and 13 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -145,3 +145,5 @@ cython_debug/

# Development Docker Compose file
docker-compose.dev.yaml
karton-logs
output
40 changes: 40 additions & 0 deletions alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
[alembic]
script_location = migrations
prepend_sys_path = .
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.

[post_write_hooks]

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
4 changes: 4 additions & 0 deletions artemis/csrf.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,11 @@
from fastapi_csrf_protect.exceptions import CsrfProtectError
from pydantic import BaseModel

from artemis.db import DB
from artemis.templating import templates

db = DB()


def generate_csrf_secret() -> str:
csrf_secret_path = "/data/csrf_secret"
Expand Down Expand Up @@ -44,6 +47,7 @@ def get_csrf_config() -> CsrfSettings:
def csrf_form_template_response(template_name: str, context: Dict[str, Any], csrf_protect: CsrfProtect) -> Response:
    """Render *template_name* with a fresh CSRF token pair and set the signed
    token as a cookie on the response.

    Also injects ``tag_names`` (all rows from the ``tag`` table) into the
    template context — presumably for tag autocompletion in forms.
    """
    csrf_token, signed_token = csrf_protect.generate_csrf_tokens()
    context["csrf_token"] = csrf_token
    # NOTE(review): this issues a DB query on every form render via the
    # module-level DB() instance; consider caching if these views become hot.
    context["tag_names"] = db.get_tags()
    response = templates.TemplateResponse(template_name, context)
    csrf_protect.set_csrf_cookie(signed_token, response)
    return response
Expand Down
23 changes: 21 additions & 2 deletions artemis/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import os
import shutil
from enum import Enum
from typing import Any, Dict, Generator, List, Optional
from typing import Any, Dict, Generator, List, Optional, Type

from karton.core import Task
from pydantic import BaseModel
Expand Down Expand Up @@ -138,6 +138,14 @@ class ReportGenerationTask(Base): # type: ignore
alerts = Column(JSON, nullable=True)


class Tag(Base):  # type: ignore
    """ORM model for a user-supplied scan tag: one row per unique tag name."""

    __tablename__ = "tag"

    id = Column(Integer, primary_key=True)
    # Uniqueness is enforced at the DB level; save_tag() relies on it via
    # ON CONFLICT DO NOTHING.
    tag_name = Column(String, index=True, unique=True)
    # Row creation timestamp, assigned by the database server.
    created_at = Column(DateTime, server_default=text("NOW()"))


@dataclasses.dataclass
class PaginatedResults:
records_count_total: int
Expand Down Expand Up @@ -179,7 +187,6 @@ def __init__(self) -> None:
Config.Data.POSTGRES_CONN_STR, json_serializer=functools.partial(json.dumps, cls=JSONEncoderAdditionalTypes)
)
self.session = sessionmaker(bind=self._engine)
Base.metadata.create_all(bind=self._engine, checkfirst=True)

def list_analysis(self) -> List[Dict[str, Any]]:
with self.session() as session:
Expand Down Expand Up @@ -525,3 +532,15 @@ def _strip_internal_db_info(self, d: Dict[str, Any]) -> Dict[str, Any]:
if "headers_string" in d:
del d["headers_string"]
return d

def save_tag(self, tag_name: str | None) -> None:
    """Persist *tag_name* into the ``tag`` table, ignoring duplicates.

    A ``None`` tag is a no-op; an already-known name is silently skipped
    via PostgreSQL's ``ON CONFLICT DO NOTHING`` on the unique tag_name index.
    """
    if tag_name is None:
        return
    insert_stmt = (
        postgres_insert(Tag)
        .values(tag_name=tag_name)
        .on_conflict_do_nothing(index_elements=[Tag.tag_name])
    )
    with self.session() as session:
        session.execute(insert_stmt)
        session.commit()

def get_tags(self) -> List[Tag]:
    """Return all rows of the ``tag`` table as Tag instances.

    Fix: the previous annotation ``List[Type[Tag]] | Any`` was wrong —
    ``session.query(Tag).all()`` yields Tag *instances*, not classes.
    """
    with self.session() as session:
        return session.query(Tag).all()
1 change: 1 addition & 0 deletions artemis/producer.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,4 +26,5 @@ def create_tasks(
task.add_payload("disabled_modules", disabled_modules, persistent=True)
db.create_analysis(task)
db.save_scheduled_task(task)
db.save_tag(tag)
producer.send_task(task)
4 changes: 2 additions & 2 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ services:

autoreporter:
<<: *artemis-build-or-image
command: "bash -c '/wait-for-it.sh postgres:5432 -- python3 -m artemis.reporting.task_handler'"
command: "bash -c '/wait-for-it.sh postgres:5432 -- alembic upgrade head && python3 -m artemis.reporting.task_handler'"
env_file: .env
restart: always
volumes:
Expand All @@ -81,7 +81,7 @@ services:
env_file: .env
volumes:
- ./archived-task-results/:/opt/archived-task-results/
command: "bash -c '/wait-for-it.sh postgres:5432 -- python3 -m artemis.autoarchiver.autoarchiver'"
command: "bash -c '/wait-for-it.sh postgres:5432 -- alembic upgrade head && python3 -m artemis.autoarchiver.autoarchiver'"
restart: always

web:
Expand Down
4 changes: 3 additions & 1 deletion docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,9 @@ RUN apk add --no-cache --virtual .build-deps go gcc git libc-dev make libffi-dev
WORKDIR /opt

COPY "artemis/" "artemis/"
COPY "alembic.ini" "alembic.ini"
COPY "migrations/" "migrations/"
COPY "static/" "static/"
COPY "templates/" "templates/"

CMD ["uvicorn", "artemis.main:app", "--host", "0.0.0.0", "--port", "5000"]
CMD ["bash", "-c", "alembic upgrade head && uvicorn artemis.main:app --host 0.0.0.0 --port 5000"]
1 change: 1 addition & 0 deletions migrations/README
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Generic single-database configuration.
80 changes: 80 additions & 0 deletions migrations/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
# type: ignore
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

from artemis.config import Config
from artemis.db import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata

target_metadata = Base.metadata
# target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

DATABASE_URL = Config.Data.POSTGRES_CONN_STR


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the context with just a URL and not an Engine, so no DBAPI
    needs to be available; context.execute() calls emit SQL to the script
    output instead of a live connection.
    """
    offline_options = {
        "url": DATABASE_URL,
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**offline_options)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    Creates an Engine from the project's connection string and associates a
    live connection with the Alembic context.
    """
    engine = engine_from_config(
        {"sqlalchemy.url": DATABASE_URL},
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot connection: no pooling needed
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()


# Entry point: Alembic sets offline mode for `alembic upgrade --sql` etc.;
# otherwise run against a live database connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
24 changes: 24 additions & 0 deletions migrations/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
${upgrades if upgrades else "pass"}


def downgrade():
${downgrades if downgrades else "pass"}
41 changes: 41 additions & 0 deletions migrations/versions/40355237ae7c_tag_migration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# type: ignore
"""Tag migration
Revision ID: 40355237ae7c
Revises: 99b5570a348e
Create Date: 2024-06-20 13:47:53.630547
"""
import sqlalchemy as sa
from alembic import op

from artemis.config import Config

# revision identifiers, used by Alembic.
revision = "40355237ae7c"
down_revision = "99b5570a348e"
branch_labels = None
depends_on = None
DATABASE_URL = Config.Data.POSTGRES_CONN_STR
engine = sa.create_engine(DATABASE_URL)


def upgrade():
    """Create the ``tag`` table and backfill it from existing analyses/tasks.

    Fix: the backfill previously copied NULL tags as well (both source
    columns are nullable), creating a meaningless NULL row that
    ``DB.save_tag`` deliberately never produces — filter them out.
    """
    op.create_table(
        "tag",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("tag_name", sa.String(), nullable=True),
        sa.Column("created_at", sa.DateTime(), server_default=sa.text("NOW()"), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_tag_tag_name"), "tag", ["tag_name"], unique=True)

    print("""Data is now being migrated. We recommend being patient as it may take some time...""")
    # UNION already de-duplicates across both sources; skip NULL tags so the
    # unique ix_tag_tag_name index backfill matches what save_tag() inserts.
    # NOTE(review): the module-level `engine = sa.create_engine(...)` above is
    # unused by this migration and could be dropped.
    op.execute(
        "INSERT INTO tag (tag_name) "
        "SELECT tag FROM analysis WHERE tag IS NOT NULL "
        "UNION "
        "SELECT tag FROM task_result WHERE tag IS NOT NULL;"
    )


def downgrade():
    """Reverse of upgrade(): drop the unique index, then the ``tag`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_tag_tag_name"), table_name="tag")
    op.drop_table("tag")
    # ### end Alembic commands ###
Loading

0 comments on commit 6ba9b45

Please sign in to comment.