🌙 LUNA - CodeLib
Collection · 3 items · Updated

Columns: prompt (string, 45–17.8k chars) | completion (string, 6–107 chars) | api (string, 12–42 chars)
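Each row in the collection pairs a code prompt with the masked completion and the fully qualified SQLModel API it exercises; in the samples below, the completion and API name appear inline at the masked position as a trailing comment. A minimal sketch (field names assumed, not taken from the source) of how one row reassembles into a source line:

# Sketch only: reassembling one row of the collection.
row = {
    "prompt": "statement = ",
    "completion": "select(User)",
    "api": "sqlmodel.select",
}
reconstructed = row["prompt"] + row["completion"]  # -> "statement = select(User)"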
from typing import List
from fastapi import APIRouter, Depends
from sqlmodel import select, Session
from app.models import *
from utils import get_session
router = APIRouter()
@router.get("/users", response_model=List[UserRead])
async def get_users(*, session: Session=Depends(get_session)):
statement = select(User)  # api: sqlmodel.select
from typing import List
from fastapi import APIRouter, Depends
from sqlmodel import select, Session
from app.models import *
from utils import get_session
router = APIRouter()
@router.get("/users", response_model=List[UserRead])
async def get_users(*, session: Session=Depends(get_session)):
statement = select(User)
results = session.exec(statement).all()
return results
@router.post("/tasks", response_model=List[TaskRead])
async def get_tasks(user: UserQuery, session: Session=Depends(get_session)):
statement = select(Task)  # api: sqlmodel.select
from typing import List
from fastapi import APIRouter, Depends
from sqlmodel import select, Session
from app.models import *
from utils import get_session
router = APIRouter()
@router.get("/users", response_model=List[UserRead])
async def get_users(*, session: Session=Depends(get_session)):
statement = select(User)
results = session.exec(statement).all()
return results
@router.post("/tasks", response_model=List[TaskRead])
async def get_tasks(user: UserQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.owner_id == user.id)
results = session.exec(statement).all()
return results
@router.post("/task", response_model=TaskRead)
async def get_task(task: TaskQuery, session: Session=Depends(get_session)):
statement = select(Task)  # api: sqlmodel.select
from typing import List
from fastapi import APIRouter, Depends
from sqlmodel import select, Session
from app.models import *
from utils import get_session
router = APIRouter()
@router.get("/users", response_model=List[UserRead])
async def get_users(*, session: Session=Depends(get_session)):
statement = select(User)
results = session.exec(statement).all()
return results
@router.post("/tasks", response_model=List[TaskRead])
async def get_tasks(user: UserQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.owner_id == user.id)
results = session.exec(statement).all()
return results
@router.post("/task", response_model=TaskRead)
async def get_task(task: TaskQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.owner_id == task.owner_id, Task.id == task.id)  # separate args are ANDed; Python "and" silently drops a condition
result = session.exec(statement).one_or_none()
return result
@router.post("/create/task", response_model=StandardResponse)
async def create_task(task: TaskCreate, session: Session=Depends(get_session)):
db_task = Task.from_orm(task)
session.add(db_task)
session.commit()
session.refresh(db_task)
return StandardResponse()
@router.post("/create/user", response_model=StandardResponse)
async def create_user(user: UserCreate, session: Session=Depends(get_session)):
db_user = User.from_orm(user)
session.add(db_user)
session.commit()
session.refresh(db_user)
return StandardResponse()
@router.post("/delete/task", response_model=StandardResponse)
async def delete_task(task: TaskQuery, session: Session=Depends(get_session)):
statement = select(Task)  # api: sqlmodel.select
from typing import List
from fastapi import APIRouter, Depends
from sqlmodel import select, Session
from app.models import *
from utils import get_session
router = APIRouter()
@router.get("/users", response_model=List[UserRead])
async def get_users(*, session: Session=Depends(get_session)):
statement = select(User)
results = session.exec(statement).all()
return results
@router.post("/tasks", response_model=List[TaskRead])
async def get_tasks(user: UserQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.owner_id == user.id)
results = session.exec(statement).all()
return results
@router.post("/task", response_model=TaskRead)
async def get_task(task: TaskQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.owner_id == task.owner_id, Task.id == task.id)
result = session.exec(statement).one_or_none()
return result
@router.post("/create/task", response_model=StandardResponse)
async def create_task(task: TaskCreate, session: Session=Depends(get_session)):
db_task = Task.from_orm(task)
session.add(db_task)
session.commit()
session.refresh(db_task)
return StandardResponse()
@router.post("/create/user", response_model=StandardResponse)
async def create_user(user: UserCreate, session: Session=Depends(get_session)):
db_user = User.from_orm(user)
session.add(db_user)
session.commit()
session.refresh(db_user)
return StandardResponse()
@router.post("/delete/task", response_model=StandardResponse)
async def delete_task(task: TaskQuery, session: Session=Depends(get_session)):
statement = select(Task).where(Task.id == task.id, Task.owner_id == task.owner_id)
result = session.exec(statement)
task = result.one_or_none()
if task:
session.delete(task)
session.commit()
return StandardResponse()
return StandardResponse(success="Failure", message="Invalid Task id or Owner id", code=400)
@router.post("/delete/user", response_model=StandardResponse)
async def delete_user(user: UserQuery, session: Session=Depends(get_session)):
statement = select(User)  # api: sqlmodel.select
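A general note on the .where() calls above: Python's `and`/`or` keywords do not translate into SQL and can silently drop a condition, so multiple filters are passed as separate arguments (which are ANDed) or wrapped explicitly with and_(). A minimal self-contained sketch; TaskDemo is a stand-in model for illustration, not the app.models.Task used above:

# Stand-in model for illustration only (not app.models.Task).
from typing import Optional
from sqlalchemy import and_
from sqlmodel import Field, SQLModel, select

class TaskDemo(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    owner_id: int

stmt_a = select(TaskDemo).where(TaskDemo.owner_id == 1, TaskDemo.id == 2)        # args are ANDed
stmt_b = select(TaskDemo).where(and_(TaskDemo.owner_id == 1, TaskDemo.id == 2))  # explicit and_()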
import types
from dataclasses import dataclass
from typing import Callable, List, Union
from fastapi import Depends, FastAPI, HTTPException, Query
from sqlmodel import Field, Session, SQLModel, select
# Model generator + container -------------------------------------------------------------
@dataclass
class MultipleModels:
path: str
base: SQLModel
response: SQLModel
def __post_init__(self):
self.creation: SQLModel = self.make_creator_cls()
self.table: SQLModel = self.make_table_cls()
self.update: SQLModel = self.make_updater_cls()
@staticmethod
def make_cls_name(base: type, rename_base_to: str) -> str:
"""For a class name of format ``"ClassBase"``, return a modified name in which
the substring ``"Base"`` is replaced with the string passed to ``rename_base_to``.
:param base: The base model. Its name must end with the substring ``"Base"``.
:param rename_base_to: String to replace `"Base"` with.
"""
return base.__name__.replace("Base", rename_base_to)
def make_creator_cls(self) -> SQLModel:
"""From a base model, make and return a creation model. As described in
https://sqlmodel.tiangolo.com/tutorial/fastapi/multiple-models/#the-herocreate-data-model,
the creation model is simply a copy of the base model, with the substring ``"Base"`` in the
class name replaced by the substring ``"Create"``.
:param base: The base model.
"""
cls_name = self.make_cls_name(self.base, "Create")
return type(cls_name, (self.base,), {})
def make_updater_cls(self) -> SQLModel:
"""From a base model, make and return an update model. As described in
https://sqlmodel.tiangolo.com/tutorial/fastapi/update/#heroupdate-model, the update model
is the same as the base model, but with all fields annotated as ``Optional`` and all field
defaults set to ``None``.
:param base: The base model. Note that unlike in ``make_creator``, this is not the base for
inheritance (all updaters inherit directly from ``SQLModel``) but rather is used to derive
the output class name, attributes, and type annotations.
"""
cls_name = self.make_cls_name(self.base, "Update")
sig = self.base.__signature__
params = list(sig.parameters)
# Pulling the type via `__signature__` rather than `__annotations__` because
# this accessor drops the `typing.Union[...]` wrapper for optional fields
annotations = {p: Union[sig.parameters[p].annotation, None] for p in params}
defaults = {p: None for p in params}
attrs = {**defaults, "__annotations__": annotations}
return type(cls_name, (SQLModel,), attrs)
def make_table_cls(self) -> SQLModel:
"""From a base model, make and return a table model. As described in
https://sqlmodel.tiangolo.com/tutorial/fastapi/multiple-models/#the-hero-table-model,
the table model is the same as the base model, with the addition of the ``table=True`` class
creation keyword and an ``id`` attribute of type ``Optional[int]`` set to a default value of
``Field(default=None, primary_key=True)``.
:param base: The base model.
"""
cls_name = self.make_cls_name(self.base, "")
attrs = dict(id=Field(default=None, primary_key=True)  # api: sqlmodel.Field
import types
from dataclasses import dataclass
from typing import Callable, List, Union
from fastapi import Depends, FastAPI, HTTPException, Query
from sqlmodel import Field, Session, SQLModel, select
# Model generator + container -------------------------------------------------------------
@dataclass
class MultipleModels:
path: str
base: SQLModel
response: SQLModel
def __post_init__(self):
self.creation: SQLModel = self.make_creator_cls()
self.table: SQLModel = self.make_table_cls()
self.update: SQLModel = self.make_updater_cls()
@staticmethod
def make_cls_name(base: type, rename_base_to: str) -> str:
"""For a class name of format ``"ClassBase"``, return a modified name in which
the substring ``"Base"`` is replaced with the string passed to ``rename_base_to``.
:param base: The base model. Its name must end with the substring ``"Base"``.
:param rename_base_to: String to replace `"Base"` with.
"""
return base.__name__.replace("Base", rename_base_to)
def make_creator_cls(self) -> SQLModel:
"""From a base model, make and return a creation model. As described in
https://sqlmodel.tiangolo.com/tutorial/fastapi/multiple-models/#the-herocreate-data-model,
the creation model is simply a copy of the base model, with the substring ``"Base"`` in the
class name replaced by the substring ``"Create"``.
:param base: The base model.
"""
cls_name = self.make_cls_name(self.base, "Create")
return type(cls_name, (self.base,), {})
def make_updater_cls(self) -> SQLModel:
"""From a base model, make and return an update model. As described in
https://sqlmodel.tiangolo.com/tutorial/fastapi/update/#heroupdate-model, the update model
is the same as the base model, but with all fields annotated as ``Optional`` and all field
defaults set to ``None``.
:param base: The base model. Note that unlike in ``make_creator``, this is not the base for
inheritance (all updaters inherit directly from ``SQLModel``) but rather is used to derive
the output class name, attributes, and type annotations.
"""
cls_name = self.make_cls_name(self.base, "Update")
sig = self.base.__signature__
params = list(sig.parameters)
# Pulling the type via `__signature__` rather than `__annotations__` because
# this accessor drops the `typing.Union[...]` wrapper for optional fields
annotations = {p: Union[sig.parameters[p].annotation, None] for p in params}
defaults = {p: None for p in params}
attrs = {**defaults, "__annotations__": annotations}
return type(cls_name, (SQLModel,), attrs)
def make_table_cls(self) -> SQLModel:
"""From a base model, make and return a table model. As described in
https://sqlmodel.tiangolo.com/tutorial/fastapi/multiple-models/#the-hero-table-model,
the table model is the same as the base model, with the addition of the ``table=True`` class
creation keyword and an ``id`` attribute of type ``Optional[int]`` set to a default value of
``Field(default=None, primary_key=True)``.
:param base: The base model.
"""
cls_name = self.make_cls_name(self.base, "")
attrs = dict(id=Field(default=None, primary_key=True))
annotations = dict(id=Union[int, None])
attrs.update(dict(__annotations__=annotations))
# We are using `typing.new_class` (vs. `type`) b/c it supports the `table=True` kwarg.
# https://twitter.com/simonw/status/1430255521127305216?s=20
# https://docs.python.org/3/reference/datamodel.html#customizing-class-creation
return types.new_class(
cls_name, (self.base,), dict(table=True), lambda ns: ns.update(attrs)
)
# SQLModel database interface functions ---------------------------------------------------
def create(*, session: Session, table_cls: SQLModel, model: SQLModel) -> SQLModel:
db_model = table_cls.from_orm(model)
session.add(db_model)
session.commit()
session.refresh(db_model)
return db_model
def read_range(*, session: Session, table_cls: SQLModel, offset: int, limit: int) -> List:
return session.exec(select(table_cls)  # api: sqlmodel.select
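A brief usage sketch of the MultipleModels container defined above; HeroBase and HeroRead are illustrative stand-ins, not models from the source:

# Illustrative models only; MultipleModels is the dataclass defined above.
from typing import Optional
from sqlmodel import SQLModel

class HeroBase(SQLModel):
    name: str
    age: Optional[int] = None

class HeroRead(HeroBase):
    id: int

models = MultipleModels(path="/heroes/", base=HeroBase, response=HeroRead)
# __post_init__ derives three classes from HeroBase:
#   models.creation -> "HeroCreate" (same fields as the base)
#   models.update   -> "HeroUpdate" (all fields Optional, defaulting to None)
#   models.table    -> "Hero"       (table=True, with an Optional[int] primary-key id)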
import datetime
from sqlmodel import Field, Relationship, SQLModel
class User(SQLModel, table=True):
__tablename__ = "users"
id: int = Field(primary_key=True)  # api: sqlmodel.Field
import contextlib
import os
import pathlib
import hypothesis.strategies as st
import pytest
import strawberry
from hypothesis.strategies import SearchStrategy
from sqlalchemy.pool import StaticPool
from sqlmodel import Session, SQLModel, create_engine
from starlette.testclient import TestClient
from fastapi_server.database.database import get_session
from fastapi_server.main import app
from fastapi_server.routes.graphql import schema
TEST_DB_FILE_PATH = 'test.db'
TEST_DB_URL = f'sqlite:///{TEST_DB_FILE_PATH}'
TEST_DB_MEMORY_PATH = ':memory:'
TEST_DB_MEMORY_URL = f'sqlite:///{TEST_DB_MEMORY_PATH}'
class BaseTest:
method_client: TestClient = None # type: ignore
method_session: Session = None # type: ignore
example_client: TestClient = None # type: ignore
example_session: Session = None # type: ignore
def setup_method(self, _method):
BaseTest.method_session = BaseTest.create_memory_sesssion()
# BaseTest.method_session = BaseTest.create_file_sesssion()
BaseTest.method_client = TestClient(app)
BaseTest.method_client.app.dependency_overrides[get_session] = BaseTest.method_get_session
def teardown_method(self, _method):
if BaseTest.method_session is not None:
db_path = pathlib.Path(TEST_DB_FILE_PATH)
# Remove file if it wasn't a memory database
if BaseTest.method_session.bind.url.database != TEST_DB_MEMORY_PATH and db_path.is_file():
os.remove(db_path)
BaseTest.method_session.close()
BaseTest.method_session = None
app.dependency_overrides.clear()
BaseTest.method_client = None
@classmethod
def create_file_sesssion(cls):
engine = create_engine(TEST_DB_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)  # api: sqlmodel.create_engine
import contextlib
import os
import pathlib
import hypothesis.strategies as st
import pytest
import strawberry
from hypothesis.strategies import SearchStrategy
from sqlalchemy.pool import StaticPool
from sqlmodel import Session, SQLModel, create_engine
from starlette.testclient import TestClient
from fastapi_server.database.database import get_session
from fastapi_server.main import app
from fastapi_server.routes.graphql import schema
TEST_DB_FILE_PATH = 'test.db'
TEST_DB_URL = f'sqlite:///{TEST_DB_FILE_PATH}'
TEST_DB_MEMORY_PATH = ':memory:'
TEST_DB_MEMORY_URL = f'sqlite:///{TEST_DB_MEMORY_PATH}'
class BaseTest:
method_client: TestClient = None # type: ignore
method_session: Session = None # type: ignore
example_client: TestClient = None # type: ignore
example_session: Session = None # type: ignore
def setup_method(self, _method):
BaseTest.method_session = BaseTest.create_memory_sesssion()
# BaseTest.method_session = BaseTest.create_file_sesssion()
BaseTest.method_client = TestClient(app)
BaseTest.method_client.app.dependency_overrides[get_session] = BaseTest.method_get_session
def teardown_method(self, _method):
if BaseTest.method_session is not None:
db_path = pathlib.Path(TEST_DB_FILE_PATH)
# Remove file if it wasn't a memory database
if BaseTest.method_session.bind.url.database != TEST_DB_MEMORY_PATH and db_path.is_file():
os.remove(db_path)
BaseTest.method_session.close()
BaseTest.method_session = None
app.dependency_overrides.clear()
BaseTest.method_client = None
@classmethod
def create_file_sesssion(cls):
engine = create_engine(TEST_DB_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
SQLModel.metadata.create_all(engine)  # api: sqlmodel.SQLModel.metadata.create_all
import contextlib
import os
import pathlib
import hypothesis.strategies as st
import pytest
import strawberry
from hypothesis.strategies import SearchStrategy
from sqlalchemy.pool import StaticPool
from sqlmodel import Session, SQLModel, create_engine
from starlette.testclient import TestClient
from fastapi_server.database.database import get_session
from fastapi_server.main import app
from fastapi_server.routes.graphql import schema
TEST_DB_FILE_PATH = 'test.db'
TEST_DB_URL = f'sqlite:///{TEST_DB_FILE_PATH}'
TEST_DB_MEMORY_PATH = ':memory:'
TEST_DB_MEMORY_URL = f'sqlite:///{TEST_DB_MEMORY_PATH}'
class BaseTest:
method_client: TestClient = None # type: ignore
method_session: Session = None # type: ignore
example_client: TestClient = None # type: ignore
example_session: Session = None # type: ignore
def setup_method(self, _method):
BaseTest.method_session = BaseTest.create_memory_sesssion()
# BaseTest.method_session = BaseTest.create_file_sesssion()
BaseTest.method_client = TestClient(app)
BaseTest.method_client.app.dependency_overrides[get_session] = BaseTest.method_get_session
def teardown_method(self, _method):
if BaseTest.method_session is not None:
db_path = pathlib.Path(TEST_DB_FILE_PATH)
# Remove file if it wasn't a memory database
if BaseTest.method_session.bind.url.database != TEST_DB_MEMORY_PATH and db_path.is_file():
os.remove(db_path)
BaseTest.method_session.close()
BaseTest.method_session = None
app.dependency_overrides.clear()
BaseTest.method_client = None
@classmethod
def create_file_sesssion(cls):
engine = create_engine(TEST_DB_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
SQLModel.metadata.create_all(engine)
with Session(engine, autoflush=False, autocommit=False) as session:
return session
@classmethod
def create_memory_sesssion(cls):
engine = create_engine(TEST_DB_MEMORY_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)  # api: sqlmodel.create_engine
import contextlib
import os
import pathlib
import hypothesis.strategies as st
import pytest
import strawberry
from hypothesis.strategies import SearchStrategy
from sqlalchemy.pool import StaticPool
from sqlmodel import Session, SQLModel, create_engine
from starlette.testclient import TestClient
from fastapi_server.database.database import get_session
from fastapi_server.main import app
from fastapi_server.routes.graphql import schema
TEST_DB_FILE_PATH = 'test.db'
TEST_DB_URL = f'sqlite:///{TEST_DB_FILE_PATH}'
TEST_DB_MEMORY_PATH = ':memory:'
TEST_DB_MEMORY_URL = f'sqlite:///{TEST_DB_MEMORY_PATH}'
class BaseTest:
method_client: TestClient = None # type: ignore
method_session: Session = None # type: ignore
example_client: TestClient = None # type: ignore
example_session: Session = None # type: ignore
def setup_method(self, _method):
BaseTest.method_session = BaseTest.create_memory_sesssion()
# BaseTest.method_session = BaseTest.create_file_sesssion()
BaseTest.method_client = TestClient(app)
BaseTest.method_client.app.dependency_overrides[get_session] = BaseTest.method_get_session
def teardown_method(self, _method):
if BaseTest.method_session is not None:
db_path = pathlib.Path(TEST_DB_FILE_PATH)
# Remove file if it wasn't a memory database
if BaseTest.method_session.bind.url.database != TEST_DB_MEMORY_PATH and db_path.is_file():
os.remove(db_path)
BaseTest.method_session.close()
BaseTest.method_session = None
app.dependency_overrides.clear()
BaseTest.method_client = None
@classmethod
def create_file_sesssion(cls):
engine = create_engine(TEST_DB_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
SQLModel.metadata.create_all(engine)
with Session(engine, autoflush=False, autocommit=False) as session:
return session
@classmethod
def create_memory_sesssion(cls):
engine = create_engine(TEST_DB_MEMORY_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
SQLModel.metadata.create_all(engine)  # api: sqlmodel.SQLModel.metadata.create_all
import contextlib
import os
import pathlib
import hypothesis.strategies as st
import pytest
import strawberry
from hypothesis.strategies import SearchStrategy
from sqlalchemy.pool import StaticPool
from sqlmodel import Session, SQLModel, create_engine
from starlette.testclient import TestClient
from fastapi_server.database.database import get_session
from fastapi_server.main import app
from fastapi_server.routes.graphql import schema
TEST_DB_FILE_PATH = 'test.db'
TEST_DB_URL = f'sqlite:///{TEST_DB_FILE_PATH}'
TEST_DB_MEMORY_PATH = ':memory:'
TEST_DB_MEMORY_URL = f'sqlite:///{TEST_DB_MEMORY_PATH}'
class BaseTest:
method_client: TestClient = None # type: ignore
method_session: Session = None # type: ignore
example_client: TestClient = None # type: ignore
example_session: Session = None # type: ignore
def setup_method(self, _method):
BaseTest.method_session = BaseTest.create_memory_sesssion()
# BaseTest.method_session = BaseTest.create_file_sesssion()
BaseTest.method_client = TestClient(app)
BaseTest.method_client.app.dependency_overrides[get_session] = BaseTest.method_get_session
def teardown_method(self, _method):
if BaseTest.method_session is not None:
db_path = pathlib.Path(TEST_DB_FILE_PATH)
# Remove file if it wasn't a memory database
if BaseTest.method_session.bind.url.database != TEST_DB_MEMORY_PATH and db_path.is_file():
os.remove(db_path)
BaseTest.method_session.close()
BaseTest.method_session = None
app.dependency_overrides.clear()
BaseTest.method_client = None
@classmethod
def create_file_sesssion(cls):
engine = create_engine(TEST_DB_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
SQLModel.metadata.create_all(engine)
with Session(engine, autoflush=False, autocommit=False)  # api: sqlmodel.Session
import contextlib
import os
import pathlib
import hypothesis.strategies as st
import pytest
import strawberry
from hypothesis.strategies import SearchStrategy
from sqlalchemy.pool import StaticPool
from sqlmodel import Session, SQLModel, create_engine
from starlette.testclient import TestClient
from fastapi_server.database.database import get_session
from fastapi_server.main import app
from fastapi_server.routes.graphql import schema
TEST_DB_FILE_PATH = 'test.db'
TEST_DB_URL = f'sqlite:///{TEST_DB_FILE_PATH}'
TEST_DB_MEMORY_PATH = ':memory:'
TEST_DB_MEMORY_URL = f'sqlite:///{TEST_DB_MEMORY_PATH}'
class BaseTest:
method_client: TestClient = None # type: ignore
method_session: Session = None # type: ignore
example_client: TestClient = None # type: ignore
example_session: Session = None # type: ignore
def setup_method(self, _method):
BaseTest.method_session = BaseTest.create_memory_sesssion()
# BaseTest.method_session = BaseTest.create_file_sesssion()
BaseTest.method_client = TestClient(app)
BaseTest.method_client.app.dependency_overrides[get_session] = BaseTest.method_get_session
def teardown_method(self, _method):
if BaseTest.method_session is not None:
db_path = pathlib.Path(TEST_DB_FILE_PATH)
# Remove file if it wasn't a memory database
if BaseTest.method_session.bind.url.database != TEST_DB_MEMORY_PATH and db_path.is_file():
os.remove(db_path)
BaseTest.method_session.close()
BaseTest.method_session = None
app.dependency_overrides.clear()
BaseTest.method_client = None
@classmethod
def create_file_sesssion(cls):
engine = create_engine(TEST_DB_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
SQLModel.metadata.create_all(engine)
with Session(engine, autoflush=False, autocommit=False) as session:
return session
@classmethod
def create_memory_sesssion(cls):
engine = create_engine(TEST_DB_MEMORY_URL, connect_args={'check_same_thread': False}, poolclass=StaticPool)
SQLModel.metadata.create_all(engine)
with Session(engine, autoflush=False, autocommit=False)  # api: sqlmodel.Session
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from random import shuffle
from typing import Optional, cast
import pytest
from sqlalchemy.future import Engine
from sqlmodel import Session, func, select
import tests.example.entities as entities
from dbgen.core.args import Const
from dbgen.core.entity import Entity
from dbgen.core.func import Import
from dbgen.core.generator import Generator
from dbgen.core.metadata import RunEntity
from dbgen.core.node.load import Load
from dbgen.core.node.query import BaseQuery
from dbgen.core.node.transforms import PyBlock
def transform_func(x):
return f"{x}-child"
@pytest.fixture(scope='function')
def basic_generator() -> Generator:
Parent = entities.Parent
Child = entities.Child
select_stmt = select(Parent.label)  # api: sqlmodel.select
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from random import shuffle
from typing import Optional, cast
import pytest
from sqlalchemy.future import Engine
from sqlmodel import Session, func, select
import tests.example.entities as entities
from dbgen.core.args import Const
from dbgen.core.entity import Entity
from dbgen.core.func import Import
from dbgen.core.generator import Generator
from dbgen.core.metadata import RunEntity
from dbgen.core.node.load import Load
from dbgen.core.node.query import BaseQuery
from dbgen.core.node.transforms import PyBlock
def transform_func(x):
return f"{x}-child"
@pytest.fixture(scope='function')
def basic_generator() -> Generator:
Parent = entities.Parent
Child = entities.Child
select_stmt = select(Parent.label)
query = BaseQuery.from_select_statement(select_stmt)
assert isinstance(query.hash, str)
pyblock = PyBlock(function=transform_func, inputs=[query["label"]], outputs=["newnames"])
load = Child.load(insert=True, label=pyblock["newnames"], type=Const("child_type"))
assert isinstance(load.hash, str)
gen = Generator(name="test", extract=query, transforms=[pyblock], loads=[load])
return gen
def test_basic_graph_sort(basic_generator: Generator):
"""Ensure a simple Query->PyBlock->Load is sorted correctly."""
graph = basic_generator._computational_graph()
assert len(graph) == 3
sorted_nodes = basic_generator._sort_graph()
query, transform, load = sorted_nodes
assert isinstance(query, BaseQuery)
assert isinstance(transform, PyBlock)
assert isinstance(load, Load)
def test_basic_graph_in_place(basic_generator: Generator):
"""Ensure that changes to the output of ._sort_graph() are in place and affect the generator as well."""
query, transform, load = basic_generator._sort_graph()
assert isinstance(load, Load)
load.run({transform.hash: {"newnames": ("1", "2")}})
assert load._output == basic_generator._sorted_loads()[0]._output
assert isinstance(query, BaseQuery)
query.outputs.append("test")
assert basic_generator.extract == query
assert isinstance(transform, PyBlock)
import_to_add = Import(lib="numpy", lib_alias="np")
transform.env.imports.append(import_to_add)
assert basic_generator.transforms[0] == transform
assert basic_generator.transforms[0].env.imports == [import_to_add]
def test_sorted_loads():
"""Shuffle around the loads and make sure sorted_loads still works."""
val = Const("test")
gp_load = entities.GrandParent.load(label=val, type=val)
u_load = entities.Parent.load(label=val, type=Const("uncle"), grand_parent_id=gp_load)
p_load = entities.Parent.load(label=val, type=val, grand_parent_id=gp_load)
c_load = entities.Child.load(label=val, type=val, parent_id=p_load, uncle_id=u_load)
loads = [gp_load, c_load, p_load, u_load]
for _ in range(10):
shuffle(loads)
gen = Generator(name="test", loads=loads)
assert gen._sorted_loads() == [
gp_load,
*sorted((u_load, p_load), key=lambda x: x.hash),
c_load,
]
@pytest.mark.skip
def test_no_extractor(sql_engine: Engine, raw_connection):
"""Shuffle around the loads and make sure sorted_loads still works."""
entities.Parent.metadata.create_all(sql_engine)
pyblock = PyBlock(function=transform_func, inputs=[Const("test")], outputs=["newnames"])
p_load = entities.GrandParent.load(insert=True, label=pyblock["newnames"], type=Const("gp_type"))
gen = Generator(name="test", transforms=[pyblock], loads=[p_load])
gen.run(sql_engine)
with Session(sql_engine) as session:
session = cast(Session, session)
statement = select(entities.GrandParent).where(entities.GrandParent.label == "test-child")
result = session.exec(statement)
assert result.one()
@pytest.mark.database
def test_dumb_extractor(connection, sql_engine, recreate_meta):
class User(Entity, table=True):
__identifying__ = {"label"}
label: Optional[str]
new_label: Optional[str] = None
User.metadata.create_all(connection)
num_users = 100
sess = Session(connection)  # api: sqlmodel.Session
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from random import shuffle
from typing import Optional, cast
import pytest
from sqlalchemy.future import Engine
from sqlmodel import Session, func, select
import tests.example.entities as entities
from dbgen.core.args import Const
from dbgen.core.entity import Entity
from dbgen.core.func import Import
from dbgen.core.generator import Generator
from dbgen.core.metadata import RunEntity
from dbgen.core.node.load import Load
from dbgen.core.node.query import BaseQuery
from dbgen.core.node.transforms import PyBlock
def transform_func(x):
return f"{x}-child"
@pytest.fixture(scope='function')
def basic_generator() -> Generator:
Parent = entities.Parent
Child = entities.Child
select_stmt = select(Parent.label)
query = BaseQuery.from_select_statement(select_stmt)
assert isinstance(query.hash, str)
pyblock = PyBlock(function=transform_func, inputs=[query["label"]], outputs=["newnames"])
load = Child.load(insert=True, label=pyblock["newnames"], type=Const("child_type"))
assert isinstance(load.hash, str)
gen = Generator(name="test", extract=query, transforms=[pyblock], loads=[load])
return gen
def test_basic_graph_sort(basic_generator: Generator):
"""Ensure a simple Query->PyBlock->Load is sorted correctly."""
graph = basic_generator._computational_graph()
assert len(graph) == 3
sorted_nodes = basic_generator._sort_graph()
query, transform, load = sorted_nodes
assert isinstance(query, BaseQuery)
assert isinstance(transform, PyBlock)
assert isinstance(load, Load)
def test_basic_graph_in_place(basic_generator: Generator):
"""Ensure that changes to the output of ._sort_graph() are in place and affect the generator as well."""
query, transform, load = basic_generator._sort_graph()
assert isinstance(load, Load)
load.run({transform.hash: {"newnames": ("1", "2")}})
assert load._output == basic_generator._sorted_loads()[0]._output
assert isinstance(query, BaseQuery)
query.outputs.append("test")
assert basic_generator.extract == query
assert isinstance(transform, PyBlock)
import_to_add = Import(lib="numpy", lib_alias="np")
transform.env.imports.append(import_to_add)
assert basic_generator.transforms[0] == transform
assert basic_generator.transforms[0].env.imports == [import_to_add]
def test_sorted_loads():
"""Shuffle around the loads and make sure sorted_loads still works."""
val = Const("test")
gp_load = entities.GrandParent.load(label=val, type=val)
u_load = entities.Parent.load(label=val, type=Const("uncle"), grand_parent_id=gp_load)
p_load = entities.Parent.load(label=val, type=val, grand_parent_id=gp_load)
c_load = entities.Child.load(label=val, type=val, parent_id=p_load, uncle_id=u_load)
loads = [gp_load, c_load, p_load, u_load]
for _ in range(10):
shuffle(loads)
gen = Generator(name="test", loads=loads)
assert gen._sorted_loads() == [
gp_load,
*sorted((u_load, p_load), key=lambda x: x.hash),
c_load,
]
@pytest.mark.skip
def test_no_extractor(sql_engine: Engine, raw_connection):
"""Shuffle around the loads and make sure sorted_loads still works."""
entities.Parent.metadata.create_all(sql_engine)
pyblock = PyBlock(function=transform_func, inputs=[Const("test")], outputs=["newnames"])
p_load = entities.GrandParent.load(insert=True, label=pyblock["newnames"], type=Const("gp_type"))
gen = Generator(name="test", transforms=[pyblock], loads=[p_load])
gen.run(sql_engine)
with Session(sql_engine) as session:
session = cast(Session, session)
statement = select(entities.GrandParent).where(entities.GrandParent.label == "test-child")
result = session.exec(statement)
assert result.one()
@pytest.mark.database
def test_dumb_extractor(connection, sql_engine, recreate_meta):
class User(Entity, table=True):
__identifying__ = {"label"}
label: Optional[str]
new_label: Optional[str] = None
User.metadata.create_all(connection)
num_users = 100
sess = Session(connection)
users = [User(label=f"user_{i}") for i in range(num_users)]
user_le = User._get_load_entity()
for user in users:
user.id = user_le._get_hash(user.dict())
sess.add(user)
count = sess.exec(select(func.count(User.id))).one()
assert count == num_users
connection.commit()
statement = select(User.id, User.label)  # api: sqlmodel.select
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from random import shuffle
from typing import Optional, cast
import pytest
from sqlalchemy.future import Engine
from sqlmodel import Session, func, select
import tests.example.entities as entities
from dbgen.core.args import Const
from dbgen.core.entity import Entity
from dbgen.core.func import Import
from dbgen.core.generator import Generator
from dbgen.core.metadata import RunEntity
from dbgen.core.node.load import Load
from dbgen.core.node.query import BaseQuery
from dbgen.core.node.transforms import PyBlock
def transform_func(x):
return f"{x}-child"
@pytest.fixture(scope='function')
def basic_generator() -> Generator:
Parent = entities.Parent
Child = entities.Child
select_stmt = select(Parent.label)
query = BaseQuery.from_select_statement(select_stmt)
assert isinstance(query.hash, str)
pyblock = PyBlock(function=transform_func, inputs=[query["label"]], outputs=["newnames"])
load = Child.load(insert=True, label=pyblock["newnames"], type=Const("child_type"))
assert isinstance(load.hash, str)
gen = Generator(name="test", extract=query, transforms=[pyblock], loads=[load])
return gen
def test_basic_graph_sort(basic_generator: Generator):
"""Ensure a simple Query->PyBlock->Load is sorted correctly."""
graph = basic_generator._computational_graph()
assert len(graph) == 3
sorted_nodes = basic_generator._sort_graph()
query, transform, load = sorted_nodes
assert isinstance(query, BaseQuery)
assert isinstance(transform, PyBlock)
assert isinstance(load, Load)
def test_basic_graph_in_place(basic_generator: Generator):
"""Ensure that changes to the output of ._sort_graph() are in place and affect the generator as well."""
query, transform, load = basic_generator._sort_graph()
assert isinstance(load, Load)
load.run({transform.hash: {"newnames": ("1", "2")}})
assert load._output == basic_generator._sorted_loads()[0]._output
assert isinstance(query, BaseQuery)
query.outputs.append("test")
assert basic_generator.extract == query
assert isinstance(transform, PyBlock)
import_to_add = Import(lib="numpy", lib_alias="np")
transform.env.imports.append(import_to_add)
assert basic_generator.transforms[0] == transform
assert basic_generator.transforms[0].env.imports == [import_to_add]
def test_sorted_loads():
"""Shuffle around the loads and make sure sorted_loads still works."""
val = Const("test")
gp_load = entities.GrandParent.load(label=val, type=val)
u_load = entities.Parent.load(label=val, type=Const("uncle"), grand_parent_id=gp_load)
p_load = entities.Parent.load(label=val, type=val, grand_parent_id=gp_load)
c_load = entities.Child.load(label=val, type=val, parent_id=p_load, uncle_id=u_load)
loads = [gp_load, c_load, p_load, u_load]
for _ in range(10):
shuffle(loads)
gen = Generator(name="test", loads=loads)
assert gen._sorted_loads() == [
gp_load,
*sorted((u_load, p_load), key=lambda x: x.hash),
c_load,
]
@pytest.mark.skip
def test_no_extractor(sql_engine: Engine, raw_connection):
"""Shuffle around the loads and make sure sorted_loads still works."""
entities.Parent.metadata.create_all(sql_engine)
pyblock = PyBlock(function=transform_func, inputs=[Const("test")], outputs=["newnames"])
p_load = entities.GrandParent.load(insert=True, label=pyblock["newnames"], type=Const("gp_type"))
gen = Generator(name="test", transforms=[pyblock], loads=[p_load])
gen.run(sql_engine)
with Session(sql_engine)  # api: sqlmodel.Session
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from random import shuffle
from typing import Optional, cast
import pytest
from sqlalchemy.future import Engine
from sqlmodel import Session, func, select
import tests.example.entities as entities
from dbgen.core.args import Const
from dbgen.core.entity import Entity
from dbgen.core.func import Import
from dbgen.core.generator import Generator
from dbgen.core.metadata import RunEntity
from dbgen.core.node.load import Load
from dbgen.core.node.query import BaseQuery
from dbgen.core.node.transforms import PyBlock
def transform_func(x):
return f"{x}-child"
@pytest.fixture(scope='function')
def basic_generator() -> Generator:
Parent = entities.Parent
Child = entities.Child
select_stmt = select(Parent.label)
query = BaseQuery.from_select_statement(select_stmt)
assert isinstance(query.hash, str)
pyblock = PyBlock(function=transform_func, inputs=[query["label"]], outputs=["newnames"])
load = Child.load(insert=True, label=pyblock["newnames"], type=Const("child_type"))
assert isinstance(load.hash, str)
gen = Generator(name="test", extract=query, transforms=[pyblock], loads=[load])
return gen
def test_basic_graph_sort(basic_generator: Generator):
"""Ensure a simple Query->PyBlock->Load is sorted correctly."""
graph = basic_generator._computational_graph()
assert len(graph) == 3
sorted_nodes = basic_generator._sort_graph()
query, transform, load = sorted_nodes
assert isinstance(query, BaseQuery)
assert isinstance(transform, PyBlock)
assert isinstance(load, Load)
def test_basic_graph_in_place(basic_generator: Generator):
"""Ensure that changes to the output of ._sort_graph() are in place and affect the generator as well."""
query, transform, load = basic_generator._sort_graph()
assert isinstance(load, Load)
load.run({transform.hash: {"newnames": ("1", "2")}})
assert load._output == basic_generator._sorted_loads()[0]._output
assert isinstance(query, BaseQuery)
query.outputs.append("test")
assert basic_generator.extract == query
assert isinstance(transform, PyBlock)
import_to_add = Import(lib="numpy", lib_alias="np")
transform.env.imports.append(import_to_add)
assert basic_generator.transforms[0] == transform
assert basic_generator.transforms[0].env.imports == [import_to_add]
def test_sorted_loads():
"""Shuffle around the loads and make sure sorted_loads still works."""
val = Const("test")
gp_load = entities.GrandParent.load(label=val, type=val)
u_load = entities.Parent.load(label=val, type=Const("uncle"), grand_parent_id=gp_load)
p_load = entities.Parent.load(label=val, type=val, grand_parent_id=gp_load)
c_load = entities.Child.load(label=val, type=val, parent_id=p_load, uncle_id=u_load)
loads = [gp_load, c_load, p_load, u_load]
for _ in range(10):
shuffle(loads)
gen = Generator(name="test", loads=loads)
assert gen._sorted_loads() == [
gp_load,
*sorted((u_load, p_load), key=lambda x: x.hash),
c_load,
]
@pytest.mark.skip
def test_no_extractor(sql_engine: Engine, raw_connection):
"""Shuffle around the loads and make sure sorted_loads still works."""
entities.Parent.metadata.create_all(sql_engine)
pyblock = PyBlock(function=transform_func, inputs=[Const("test")], outputs=["newnames"])
p_load = entities.GrandParent.load(insert=True, label=pyblock["newnames"], type=Const("gp_type"))
gen = Generator(name="test", transforms=[pyblock], loads=[p_load])
gen.run(sql_engine)
with Session(sql_engine) as session:
session = cast(Session, session)
statement = select(entities.GrandParent)  # api: sqlmodel.select
# Copyright 2021 Modelyst LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from random import shuffle
from typing import Optional, cast
import pytest
from sqlalchemy.future import Engine
from sqlmodel import Session, func, select
import tests.example.entities as entities
from dbgen.core.args import Const
from dbgen.core.entity import Entity
from dbgen.core.func import Import
from dbgen.core.generator import Generator
from dbgen.core.metadata import RunEntity
from dbgen.core.node.load import Load
from dbgen.core.node.query import BaseQuery
from dbgen.core.node.transforms import PyBlock
def transform_func(x):
return f"{x}-child"
@pytest.fixture(scope='function')
def basic_generator() -> Generator:
Parent = entities.Parent
Child = entities.Child
select_stmt = select(Parent.label)
query = BaseQuery.from_select_statement(select_stmt)
assert isinstance(query.hash, str)
pyblock = PyBlock(function=transform_func, inputs=[query["label"]], outputs=["newnames"])
load = Child.load(insert=True, label=pyblock["newnames"], type=Const("child_type"))
assert isinstance(load.hash, str)
gen = Generator(name="test", extract=query, transforms=[pyblock], loads=[load])
return gen
def test_basic_graph_sort(basic_generator: Generator):
"""Ensure a simple Query->PyBlock->Load is sorted correctly."""
graph = basic_generator._computational_graph()
assert len(graph) == 3
sorted_nodes = basic_generator._sort_graph()
query, transform, load = sorted_nodes
assert isinstance(query, BaseQuery)
assert isinstance(transform, PyBlock)
assert isinstance(load, Load)
def test_basic_graph_in_place(basic_generator: Generator):
"""Ensure that changes to the output of ._sort_graph() are in place and affect the generator as well."""
query, transform, load = basic_generator._sort_graph()
assert isinstance(load, Load)
load.run({transform.hash: {"newnames": ("1", "2")}})
assert load._output == basic_generator._sorted_loads()[0]._output
assert isinstance(query, BaseQuery)
query.outputs.append("test")
assert basic_generator.extract == query
assert isinstance(transform, PyBlock)
import_to_add = Import(lib="numpy", lib_alias="np")
transform.env.imports.append(import_to_add)
assert basic_generator.transforms[0] == transform
assert basic_generator.transforms[0].env.imports == [import_to_add]
def test_sorted_loads():
"""Shuffle around the loads and make sure sorted_loads still works."""
val = Const("test")
gp_load = entities.GrandParent.load(label=val, type=val)
u_load = entities.Parent.load(label=val, type=Const("uncle"), grand_parent_id=gp_load)
p_load = entities.Parent.load(label=val, type=val, grand_parent_id=gp_load)
c_load = entities.Child.load(label=val, type=val, parent_id=p_load, uncle_id=u_load)
loads = [gp_load, c_load, p_load, u_load]
for _ in range(10):
shuffle(loads)
gen = Generator(name="test", loads=loads)
assert gen._sorted_loads() == [
gp_load,
*sorted((u_load, p_load), key=lambda x: x.hash),
c_load,
]
@pytest.mark.skip
def test_no_extractor(sql_engine: Engine, raw_connection):
"""Shuffle around the loads and make sure sorted_loads still works."""
entities.Parent.metadata.create_all(sql_engine)
pyblock = PyBlock(function=transform_func, inputs=[Const("test")], outputs=["newnames"])
p_load = entities.GrandParent.load(insert=True, label=pyblock["newnames"], type=Const("gp_type"))
gen = Generator(name="test", transforms=[pyblock], loads=[p_load])
gen.run(sql_engine)
with Session(sql_engine) as session:
session = cast(Session, session)
statement = select(entities.GrandParent).where(entities.GrandParent.label == "test-child")
result = session.exec(statement)
assert result.one()
@pytest.mark.database
def test_dumb_extractor(connection, sql_engine, recreate_meta):
class User(Entity, table=True):
__identifying__ = {"label"}
label: Optional[str]
new_label: Optional[str] = None
User.metadata.create_all(connection)
num_users = 100
sess = Session(connection)
users = [User(label=f"user_{i}") for i in range(num_users)]
user_le = User._get_load_entity()
for user in users:
user.id = user_le._get_hash(user.dict())
sess.add(user)
count = sess.exec(select(func.count(User.id)  # api: sqlmodel.func.count
from fastapi import APIRouter, Depends, HTTPException, Query, Path
from sqlmodel import Session, select
from sqlalchemy.exc import IntegrityError
from typing import List
import datetime as dt
from app.src.common.security import get_current_user
from app.src.common.utils import profiling_api
from app.src.models.app_user import AppUser
from app.src.models.product_type import (
ProductType,
ProductTypeRead,
ProductTypeCreate,
ProductTypeUpdate,
)
from app.src.db.engine import get_session
router = APIRouter()
# For teaching purposes, validation of producttype_id is added via Path:
# - it cannot be < 1
async def get_producttype_or_404(
*,
session: Session = Depends(get_session),
producttype_id: int = Path(..., ge=1),
current_user: AppUser = Depends(get_current_user),
):
start_time = dt.datetime.now()
try:
db_pt = session.get(ProductType, producttype_id)
if db_pt:
return {
"db_pt": db_pt,
"username": current_user.username,
"start_time": start_time,
}
else:
raise HTTPException(status_code=404, detail="Product type not found")
except KeyError:
raise HTTPException(status_code=400, detail="Product type not found")
@router.get("/", response_model=List[ProductTypeRead])
# le -> less than or equal (caps the page size)
async def read_product_types(
*,
session: Session = Depends(get_session),
offset: int = 0,
limit: int = Query(default=100, le=100),
current_user: AppUser = Depends(get_current_user),
):
"""
Get all the existing product types
"""
start_time = dt.datetime.now()
product_types = session.exec(select(ProductType)  # api: sqlmodel.select
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistorySummaryTreatmsummaryConference(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)  # api: sqlmodel.Field
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistorySummaryTreatmsummaryConference(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_id_order: int
history_id_conference: int
summary_treatmsummary_conference_id: int
state: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class SummaryTreatmsummaryConference(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)  # api: sqlmodel.Field
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistorySummaryTreatmsummaryConference(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
history_id_order: int
history_id_conference: int
summary_treatmsummary_conference_id: int
state: str
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class SummaryTreatmsummaryConference(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
problem: str
question: str
summary_plan: str
surgeon_summary: str
pre_operation_abg: bool
post_operation_abg: bool
pre_operation_redo_abg: bool
pre_operation_jaw_surgery: bool
pre_operation_computing_design: bool
pre_operation_3d_print: bool
created_at: datetime
updated_at: datetime
created_by: int
updated_by: Optional[int] = None
class SummaryTreatmsummaryConferenceDoctorMap(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)  # api: sqlmodel.Field
from sqlalchemy import inspect
from sqlalchemy.engine.reflection import Inspector
from sqlmodel import create_engine
def test_create_db_and_table(clear_sqlmodel):
from docs_src.tutorial.create_db_and_table import tutorial003 as mod
mod.sqlite_url = "sqlite://"
mod.engine = create_engine(mod.sqlite_url)  # api: sqlmodel.create_engine
from sqlmodel import SQLModel, Field
import uuid as uuid_pkg
from typing import Optional
class FilesBase(SQLModel):
name: str
class Files(FilesBase, table=True):
id: int = Field(default=None, primary_key=True)  # api: sqlmodel.Field
from sqlmodel import SQLModel, Field
import uuid as uuid_pkg
from typing import Optional
class FilesBase(SQLModel):
name: str
class Files(FilesBase, table=True):
id: int = Field(default=None, primary_key=True)
uuid: uuid_pkg.UUID = Field(
default_factory=uuid_pkg.uuid4,
index=True,
nullable=False,
)
count_download: int = Field(default=0)  # api: sqlmodel.Field
from sqlmodel import Session, select
from database import UserRead, PostCreate, UserCreate, User, Post
from typing import Union
from datetime import datetime
def create_object(
session: Session,
model: Union[User, Post],
request_data: Union[UserCreate, PostCreate],
user: UserRead = None,
isPost: bool = False,
) -> dict:
if isPost:
setattr(request_data, "author_name", user.name)
db_object = model.from_orm(request_data)
if isPost:
setattr(db_object, "updated_at", datetime.utcnow())
setattr(db_object, "created_at", datetime.utcnow())
session.add(db_object)
session.commit()
session.refresh(db_object)
return db_object
def get_objects(
session: Session, model: Union[User, Post], offset: int, limit: int
) -> list:
objects = session.exec(select(model).offset(offset).limit(limit)).all()
return objects
def get_object(
session: Session,
model: Union[User, Post],
criteria: Union[int, str],
isUser: bool = False,
) -> Union[User, Post]:
if isUser:
statement = select(model)  # api: sqlmodel.select
from sqlmodel import Session, select
from database import UserRead, PostCreate, UserCreate, User, Post
from typing import Union
from datetime import datetime
def create_object(
session: Session,
model: Union[User, Post],
request_data: Union[UserCreate, PostCreate],
user: UserRead = None,
isPost: bool = False,
) -> dict:
if isPost:
setattr(request_data, "author_name", user.name)
db_object = model.from_orm(request_data)
if isPost:
setattr(db_object, "updated_at", datetime.utcnow())
setattr(db_object, "created_at", datetime.utcnow())
session.add(db_object)
session.commit()
session.refresh(db_object)
return db_object
def get_objects(
session: Session, model: Union[User, Post], offset: int, limit: int
) -> list:
objects = session.exec(select(model)  # api: sqlmodel.select
from typing import Optional
from sqlmodel import Field, SQLModel, Relationship, Column
from sqlalchemy_utils.types import TSVectorType
from .db import stand_by_models, stand_by_db
stand_by_models()
class Hero(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)  # api: sqlmodel.Field
from typing import Optional
from sqlmodel import Field, SQLModel, Relationship, Column
from sqlalchemy_utils.types import TSVectorType
from .db import stand_by_models, stand_by_db
stand_by_models()
class Hero(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
content: str
age: Optional[int] = None
search_vector: Optional[str] = Field(
sa_column=Column(
TSVectorType(
"name",
"content",
# weights={"name": "A", "secret_name": "B", "age": "D"},
)
)
)
class Parents(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)  # api: sqlmodel.Field
from typing import Optional
from sqlmodel import Field, SQLModel, Relationship, Column
from sqlalchemy_utils.types import TSVectorType
from .db import stand_by_models, stand_by_db
stand_by_models()
class Hero(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
content: str
age: Optional[int] = None
search_vector: Optional[str] = Field(
sa_column=Column(
TSVectorType(
"name",
"content",
# weights={"name": "A", "secret_name": "B", "age": "D"},
)
)
)
class Parents(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
# children = orm.relationship("Children")
class Children(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)  # api: sqlmodel.Field
from typing import Optional
from sqlmodel import Field, SQLModel, Relationship, Column
from sqlalchemy_utils.types import TSVectorType
from .db import stand_by_models, stand_by_db
stand_by_models()
class Hero(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
content: str
age: Optional[int] = None
search_vector: Optional[str] = Field(
sa_column=Column(
TSVectorType(
"name",
"content",
# weights={"name": "A", "secret_name": "B", "age": "D"},
)
)
)
class Parents(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
# children = orm.relationship("Children")
class Children(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
name: str
parent_id: Optional[int] = Field(default=None, foreign_key="parents.id")  # api: sqlmodel.Field
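The Hero.search_vector column above is backed by sqlalchemy_utils' TSVectorType, which targets Postgres full-text search; a hedged query sketch, assuming the Hero model above and a Postgres-backed session:

# Sketch only: full-text search against the TSVectorType column defined above.
from sqlmodel import Session, select

def search_heroes(session: Session, terms: str):
    # .match() compiles to a full-text match (to_tsquery on Postgres) for this column
    statement = select(Hero).where(Hero.search_vector.match(terms))
    return session.exec(statement).all()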
from sqlmodel import SQLModel, Relationship
from typing import List
from app.models.base_uuid_model import BaseUUIDModel
class RoleBase(SQLModel):
name: str
description: str
class Role(BaseUUIDModel, RoleBase, table=True):
users: List["User"] = | Relationship(back_populates="role", sa_relationship_kwargs={"lazy": "selectin"}) | sqlmodel.Relationship |
from typing import TYPE_CHECKING, Optional
from uuid import UUID
from sqlalchemy.orm import joinedload
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import ORMUtils
from joj.horse.utils.base import is_uuid
if TYPE_CHECKING:
from joj.horse.models import Problem, ProblemSet
class ProblemProblemSetLink(ORMUtils, table=True): # type: ignore[call-arg]
__tablename__ = "problem_problem_set_links"
problem_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("problems.id", ondelete="CASCADE"), primary_key=True
),
)
problem_set_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("problem_sets.id", ondelete="CASCADE"), primary_key=True
),
)
position: int = Field(
index=True, nullable=False, sa_column_kwargs={"server_default": "0"}
)
problem: "Problem" = | Relationship(back_populates="problem_problem_set_links") | sqlmodel.Relationship |
from typing import TYPE_CHECKING, Optional
from uuid import UUID
from sqlalchemy.orm import joinedload
from sqlalchemy.schema import Column, ForeignKey
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import ORMUtils
from joj.horse.utils.base import is_uuid
if TYPE_CHECKING:
from joj.horse.models import Problem, ProblemSet
class ProblemProblemSetLink(ORMUtils, table=True): # type: ignore[call-arg]
__tablename__ = "problem_problem_set_links"
problem_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("problems.id", ondelete="CASCADE"), primary_key=True
),
)
problem_set_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("problem_sets.id", ondelete="CASCADE"), primary_key=True
),
)
position: int = Field(
index=True, nullable=False, sa_column_kwargs={"server_default": "0"}
)
problem: "Problem" = Relationship(back_populates="problem_problem_set_links")
problem_set: "ProblemSet" = | Relationship(back_populates="problem_problem_set_links") | sqlmodel.Relationship |
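
The link-table rows above build the composite key from raw SQLAlchemy Column/ForeignKey/GUID objects. The same many-to-many-through-a-link-model shape can be sketched with plain SQLModel fields; the integer keys and field names below are illustrative, not the joj.horse schema.

from typing import List, Optional
from sqlmodel import Field, Relationship, SQLModel

class Problem(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    title: str
    problem_set_links: List["ProblemSetLink"] = Relationship(back_populates="problem")

class ProblemSet(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    title: str
    problem_links: List["ProblemSetLink"] = Relationship(back_populates="problem_set")

class ProblemSetLink(SQLModel, table=True):
    problem_id: Optional[int] = Field(
        default=None, foreign_key="problem.id", primary_key=True)
    problem_set_id: Optional[int] = Field(
        default=None, foreign_key="problemset.id", primary_key=True)
    position: int = Field(default=0, index=True)   # ordering inside the set
    problem: Problem = Relationship(back_populates="problem_set_links")
    problem_set: ProblemSet = Relationship(back_populates="problem_links")
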
from sqlmodel import SQLModel, Field
from typing import Optional, List
from pydantic import validator
# --- model ---
class User(SQLModel, table=True):
id: Optional[int] = | Field(None, primary_key=True) | sqlmodel.Field |
from datetime import datetime
try:
from humps.main import depascalize
from sqlalchemy import Column, DateTime
from sqlalchemy.orm.decl_api import declared_attr
from sqlmodel import Field, SQLModel
except ImportError:
raise RuntimeError(
"SQLModel is not installed. Please install it with `pip install sqlmodel pyhumps`"
)
class Model(SQLModel):
"""
Abstract model providing `id`, `date_created` and `date_updated` fields.
And also automatic table naming to `snake_case`.
"""
id: int = | Field(primary_key=True) | sqlmodel.Field |
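
The abstract-base row above is cut off right after the primary key; the snake_case table naming its docstring promises is normally done with a declared_attr __tablename__. A stand-alone sketch of that idea, using a small regex helper instead of pyhumps' depascalize so it has no extra dependency:

import re
from typing import Optional
from sqlalchemy.orm import declared_attr
from sqlmodel import Field, SQLModel

def snake_case(name: str) -> str:
    # "UserAccount" -> "user_account"
    return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()

class Model(SQLModel):
    """Abstract base: integer primary key plus snake_case table names."""
    id: Optional[int] = Field(default=None, primary_key=True)

    @declared_attr
    def __tablename__(cls) -> str:    # evaluated once per concrete table
        return snake_case(cls.__name__)

class UserAccount(Model, table=True):
    email: str

print(UserAccount.__tablename__)      # -> "user_account"
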
import logging
import os
import secrets
import aioredis
import boto3
import pytest
import redis as pyredis
from fastapi.testclient import TestClient
from sqlalchemy import text
from sqlalchemy_utils import create_database, database_exists, drop_database
from sqlmodel import Session, create_engine
from iris.agent.settings import AgentSettings
from iris.api.authentication import (
current_active_user,
current_superuser,
current_verified_user,
)
from iris.api.main import app
from iris.api.settings import APISettings
from iris.commons.clickhouse import ClickHouse
from iris.commons.dependencies import get_settings
from iris.commons.models.base import Base
from iris.commons.redis import Redis
from iris.commons.settings import CommonSettings
from iris.commons.storage import Storage
from iris.commons.utils import json_serializer
from iris.worker import WorkerSettings
pytest.register_assert_rewrite("tests.assertions")
pytest_plugins = ["tests.fixtures.models", "tests.fixtures.storage"]
def should_cleanup():
return os.environ.get("IRIS_TEST_CLEANUP", "") != "0"
@pytest.fixture
def logger():
return logging.getLogger(__name__)
@pytest.fixture
def settings():
namespace = secrets.token_hex(nbytes=4)
print(f"@{namespace}", end=" ")
# Redis has 16 databases by default, we use the last one for testing.
return CommonSettings(
CLICKHOUSE_PUBLIC_USER="public",
CLICKHOUSE_DATABASE="iris_test",
DATABASE_URL=f"postgresql://iris:[email protected]/iris-test-{namespace}",
S3_PREFIX=f"iris-test-{namespace}",
S3_PUBLIC_RESOURCES=["arn:aws:s3:::test-public-exports/*"],
REDIS_NAMESPACE=f"iris-test-{namespace}",
REDIS_URL="redis://default:[email protected]?db=15",
RETRY_TIMEOUT=-1,
)
@pytest.fixture
def api_settings(settings):
return APISettings(
API_CORS_ALLOW_ORIGIN="https://example.org,http://localhost:8000",
**settings.dict(),
)
@pytest.fixture
def agent_settings(settings, tmp_path):
return AgentSettings(
**settings.dict(),
AGENT_CARACAL_SNIFFER_WAIT_TIME=1,
AGENT_MIN_TTL=0,
AGENT_RESULTS_DIR_PATH=tmp_path / "agent_results",
AGENT_TARGETS_DIR_PATH=tmp_path / "agent_targets",
)
@pytest.fixture
def worker_settings(settings, tmp_path):
return WorkerSettings(
**settings.dict(),
WORKER_RESULTS_DIR_PATH=tmp_path / "worker_results",
WORKER_MAX_OPEN_FILES=128,
)
@pytest.fixture
def clickhouse(settings, logger):
return ClickHouse(settings, logger)
@pytest.fixture
def engine(settings):
engine = | create_engine(settings.DATABASE_URL, json_serializer=json_serializer) | sqlmodel.create_engine |
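
The engine fixture above provisions a per-test Postgres database through sqlalchemy_utils before creating the tables. A reduced sketch of the same shape that runs anywhere; the SQLite URL and fixture names are placeholders, not the iris settings:

import pytest
from sqlmodel import SQLModel, create_engine

@pytest.fixture
def engine(tmp_path):
    # Fresh file-backed database per test; create_all stands in for migrations here.
    engine = create_engine(f"sqlite:///{tmp_path / 'test.db'}")
    SQLModel.metadata.create_all(engine)
    yield engine
    engine.dispose()
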
import logging
import os
import secrets
import aioredis
import boto3
import pytest
import redis as pyredis
from fastapi.testclient import TestClient
from sqlalchemy import text
from sqlalchemy_utils import create_database, database_exists, drop_database
from sqlmodel import Session, create_engine
from iris.agent.settings import AgentSettings
from iris.api.authentication import (
current_active_user,
current_superuser,
current_verified_user,
)
from iris.api.main import app
from iris.api.settings import APISettings
from iris.commons.clickhouse import ClickHouse
from iris.commons.dependencies import get_settings
from iris.commons.models.base import Base
from iris.commons.redis import Redis
from iris.commons.settings import CommonSettings
from iris.commons.storage import Storage
from iris.commons.utils import json_serializer
from iris.worker import WorkerSettings
pytest.register_assert_rewrite("tests.assertions")
pytest_plugins = ["tests.fixtures.models", "tests.fixtures.storage"]
def should_cleanup():
return os.environ.get("IRIS_TEST_CLEANUP", "") != "0"
@pytest.fixture
def logger():
return logging.getLogger(__name__)
@pytest.fixture
def settings():
namespace = secrets.token_hex(nbytes=4)
print(f"@{namespace}", end=" ")
# Redis has 16 databases by default, we use the last one for testing.
return CommonSettings(
CLICKHOUSE_PUBLIC_USER="public",
CLICKHOUSE_DATABASE="iris_test",
DATABASE_URL=f"postgresql://iris:[email protected]/iris-test-{namespace}",
S3_PREFIX=f"iris-test-{namespace}",
S3_PUBLIC_RESOURCES=["arn:aws:s3:::test-public-exports/*"],
REDIS_NAMESPACE=f"iris-test-{namespace}",
REDIS_URL="redis://default:[email protected]?db=15",
RETRY_TIMEOUT=-1,
)
@pytest.fixture
def api_settings(settings):
return APISettings(
API_CORS_ALLOW_ORIGIN="https://example.org,http://localhost:8000",
**settings.dict(),
)
@pytest.fixture
def agent_settings(settings, tmp_path):
return AgentSettings(
**settings.dict(),
AGENT_CARACAL_SNIFFER_WAIT_TIME=1,
AGENT_MIN_TTL=0,
AGENT_RESULTS_DIR_PATH=tmp_path / "agent_results",
AGENT_TARGETS_DIR_PATH=tmp_path / "agent_targets",
)
@pytest.fixture
def worker_settings(settings, tmp_path):
return WorkerSettings(
**settings.dict(),
WORKER_RESULTS_DIR_PATH=tmp_path / "worker_results",
WORKER_MAX_OPEN_FILES=128,
)
@pytest.fixture
def clickhouse(settings, logger):
return ClickHouse(settings, logger)
@pytest.fixture
def engine(settings):
engine = create_engine(settings.DATABASE_URL, json_serializer=json_serializer)
if not database_exists(engine.url):
create_database(engine.url)
Base.metadata.create_all(engine)
return engine
@pytest.fixture
async def redis(settings, logger):
client = aioredis.from_url(settings.REDIS_URL, decode_responses=True)
yield Redis(client, settings, logger)
await client.close()
@pytest.fixture
def session(engine):
with | Session(engine) | sqlmodel.Session |
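
The session fixture above yields a Session bound to the test engine, and the client factory below it fills app.dependency_overrides for the auth dependencies. A sketch of the matching trick on the database side, overriding a get_session dependency during tests; the app.main and app.deps import paths are assumptions, not part of the rows:

import pytest
from fastapi.testclient import TestClient
from sqlmodel import Session

# `app` and `get_session` are assumed to exist in the application under test.
from app.main import app
from app.deps import get_session

@pytest.fixture
def client(session: Session):
    # Reuse the session fixture defined above for every request made by the test client.
    app.dependency_overrides[get_session] = lambda: session
    yield TestClient(app)
    app.dependency_overrides.clear()
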
import logging
import os
import secrets
import aioredis
import boto3
import pytest
import redis as pyredis
from fastapi.testclient import TestClient
from sqlalchemy import text
from sqlalchemy_utils import create_database, database_exists, drop_database
from sqlmodel import Session, create_engine
from iris.agent.settings import AgentSettings
from iris.api.authentication import (
current_active_user,
current_superuser,
current_verified_user,
)
from iris.api.main import app
from iris.api.settings import APISettings
from iris.commons.clickhouse import ClickHouse
from iris.commons.dependencies import get_settings
from iris.commons.models.base import Base
from iris.commons.redis import Redis
from iris.commons.settings import CommonSettings
from iris.commons.storage import Storage
from iris.commons.utils import json_serializer
from iris.worker import WorkerSettings
pytest.register_assert_rewrite("tests.assertions")
pytest_plugins = ["tests.fixtures.models", "tests.fixtures.storage"]
def should_cleanup():
return os.environ.get("IRIS_TEST_CLEANUP", "") != "0"
@pytest.fixture
def logger():
return logging.getLogger(__name__)
@pytest.fixture
def settings():
namespace = secrets.token_hex(nbytes=4)
print(f"@{namespace}", end=" ")
# Redis has 16 databases by default, we use the last one for testing.
return CommonSettings(
CLICKHOUSE_PUBLIC_USER="public",
CLICKHOUSE_DATABASE="iris_test",
DATABASE_URL=f"postgresql://iris:[email protected]/iris-test-{namespace}",
S3_PREFIX=f"iris-test-{namespace}",
S3_PUBLIC_RESOURCES=["arn:aws:s3:::test-public-exports/*"],
REDIS_NAMESPACE=f"iris-test-{namespace}",
REDIS_URL="redis://default:[email protected]?db=15",
RETRY_TIMEOUT=-1,
)
@pytest.fixture
def api_settings(settings):
return APISettings(
API_CORS_ALLOW_ORIGIN="https://example.org,http://localhost:8000",
**settings.dict(),
)
@pytest.fixture
def agent_settings(settings, tmp_path):
return AgentSettings(
**settings.dict(),
AGENT_CARACAL_SNIFFER_WAIT_TIME=1,
AGENT_MIN_TTL=0,
AGENT_RESULTS_DIR_PATH=tmp_path / "agent_results",
AGENT_TARGETS_DIR_PATH=tmp_path / "agent_targets",
)
@pytest.fixture
def worker_settings(settings, tmp_path):
return WorkerSettings(
**settings.dict(),
WORKER_RESULTS_DIR_PATH=tmp_path / "worker_results",
WORKER_MAX_OPEN_FILES=128,
)
@pytest.fixture
def clickhouse(settings, logger):
return ClickHouse(settings, logger)
@pytest.fixture
def engine(settings):
engine = create_engine(settings.DATABASE_URL, json_serializer=json_serializer)
if not database_exists(engine.url):
create_database(engine.url)
Base.metadata.create_all(engine)
return engine
@pytest.fixture
async def redis(settings, logger):
client = aioredis.from_url(settings.REDIS_URL, decode_responses=True)
yield Redis(client, settings, logger)
await client.close()
@pytest.fixture
def session(engine):
with Session(engine) as session:
yield session
@pytest.fixture
def storage(settings, logger):
return Storage(settings, logger)
@pytest.fixture
def make_client(engine, api_settings):
def _make_client(user=None):
if user and user.is_active:
app.dependency_overrides[current_active_user] = lambda: user
if user and user.is_active and user.is_verified:
app.dependency_overrides[current_verified_user] = lambda: user
if user and user.is_active and user.is_verified and user.is_superuser:
app.dependency_overrides[current_superuser] = lambda: user
app.dependency_overrides[get_settings] = lambda: api_settings
return TestClient(app)
yield _make_client
app.dependency_overrides.clear()
@pytest.fixture(autouse=True, scope="session")
def cleanup_redis():
yield
if should_cleanup():
redis_ = pyredis.from_url("redis://default:[email protected]?db=15")
redis_.flushdb()
redis_.close()
@pytest.fixture(autouse=True, scope="session")
def cleanup_database():
yield
if should_cleanup():
# TODO: Cleanup/simplify this code.
engine = | create_engine("postgresql://iris:[email protected]") | sqlmodel.create_engine |
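
The autouse cleanup fixture above opens an admin engine and drops every leftover iris-test-* database. The core teardown can be sketched with sqlalchemy_utils alone; the URL in the comment is a placeholder:

from sqlalchemy_utils import database_exists, drop_database

def drop_test_database(url: str) -> None:
    """Remove a throwaway test database if it still exists."""
    if database_exists(url):
        drop_database(url)

# drop_test_database("postgresql://user:password@localhost/iris-test-abcd1234")
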
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with | Session(engine) | sqlmodel.Session |
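
The add command above validates dates, checks the project, inserts the ToDo row, then re-reads MAX(id) for the confirmation message. A stripped-down, self-contained sketch of just the insert-and-report path; the cut-down ToDo model and the SQLite URL stand in for the real tables module:

from typing import Optional
from sqlmodel import Field, Session, SQLModel, create_engine, func, select

class ToDo(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    task: str
    status: str = "to do"

engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

def add_task(task: str) -> int:
    with Session(engine) as session:
        session.add(ToDo(task=task))
        session.commit()
        # The CLI re-queries MAX(id); session.refresh() on the new row would work too.
        return session.exec(select(func.max(ToDo.id))).one()

print(add_task("write the report"))   # -> 1
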
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
                            f'\nDuration must be greater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nTask Start task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
with typer.progressbar(length=total_seconds) as progress:
end = datetime.utcnow() + duration
while datetime.utcnow() < end:
time.sleep(1)
progress.update(1)
else:
typer.secho('\n\nYour Time is over! Well done!\n',
blink=True,
fg=typer.colors.BRIGHT_GREEN)
pop_up_msg()
remark = typer.confirm("Any remark?")
if remark:
remark = typer.prompt('Enter your remarks.')
else:
remark = None
stop(remarks=remark)
typer.Exit()
else:
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStart task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
else:
typer.secho(f'\nTask already done\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def stop(remarks: str = typer.Option(None, '--remarks', '-r')):
"""Stop Timer."""
with | Session(engine) | sqlmodel.Session |
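
The stop command above finds the single running timer by filtering Timer.end == None and treats NoResultFound as "nothing is running". A self-contained sketch of that open-row lookup; the minimal Timer model and SQLite URL are placeholders:

from datetime import datetime
from typing import Optional
from sqlalchemy.exc import NoResultFound
from sqlmodel import Field, Session, SQLModel, create_engine, select

class Timer(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    start: datetime = Field(default_factory=datetime.utcnow)
    end: Optional[datetime] = None

engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

def stop_open_timer() -> Optional[Timer]:
    with Session(engine) as session:
        try:
            # .one() raises NoResultFound when no timer is open.
            timer = session.exec(select(Timer).where(Timer.end == None)).one()  # noqa: E711
        except NoResultFound:
            return None
        timer.end = datetime.utcnow()
        session.add(timer)
        session.commit()
        session.refresh(timer)
        return timer
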
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with | Session(engine) | sqlmodel.Session |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
                            f'\nDuration must be greater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nTask Start task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
with typer.progressbar(length=total_seconds) as progress:
end = datetime.utcnow() + duration
while datetime.utcnow() < end:
time.sleep(1)
progress.update(1)
else:
typer.secho('\n\nYour Time is over! Well done!\n',
blink=True,
fg=typer.colors.BRIGHT_GREEN)
pop_up_msg()
remark = typer.confirm("Any remark?")
if remark:
remark = typer.prompt('Enter your remarks.')
else:
remark = None
stop(remarks=remark)
typer.Exit()
else:
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStart task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
else:
typer.secho(f'\nTask already done\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def stop(remarks: str = typer.Option(None, '--remarks', '-r')):
"""Stop Timer."""
with Session(engine) as session:
try:
query_timer = session.exec(
select(Timer).where(Timer.end == None)).one()
query_timer.end = datetime.utcnow()
query_timer.duration = query_timer.end - query_timer.start
session.add(query_timer)
query = session.get(ToDo, query_timer.id_todo)
check = typer.confirm('Is the task done?')
if not check and not remarks:
pass
else:
if check:
query.status = 'done'
query.date_end = query_timer.end.date()
if remarks:
query.remarks = remarks
session.add(query)
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStop task ({query.id}). Timer id: {new_id}\n',
fg=typer.colors.GREEN)
except NoResultFound:
typer.secho(f'\nNo task running\n', fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def view(due_date: datetime = typer.Option(datetime.today() +
timedelta(weeks=1),
formats=['%Y-%m-%d'])):
"""Print to-do list view."""
overdue = | select(ToDo) | sqlmodel.select |
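
The view queries above pass several conditions to one .where(...) call and chain .order_by(...). A small sketch showing that those conditions are AND-ed; the field names follow the rows, the sample data is invented:

from datetime import date, timedelta
from typing import Optional
from sqlmodel import Field, Session, SQLModel, create_engine, select

class ToDo(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    task: str
    status: str = "to do"
    due_date: Optional[date] = None

engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

yesterday = date.today() - timedelta(days=1)
with Session(engine) as session:
    session.add(ToDo(task="late", status="doing", due_date=yesterday))
    session.add(ToDo(task="finished", status="done", due_date=yesterday))
    session.commit()

    # Multiple where() arguments are combined with AND, as in the CLI's overdue query.
    overdue = select(ToDo).where(ToDo.due_date < date.today(),
                                 ToDo.status != "done").order_by(ToDo.due_date)
    print([t.task for t in session.exec(overdue)])   # -> ['late']
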
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
                            f'\nDuration must be greater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nTask Start task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
with typer.progressbar(length=total_seconds) as progress:
end = datetime.utcnow() + duration
while datetime.utcnow() < end:
time.sleep(1)
progress.update(1)
else:
typer.secho('\n\nYour Time is over! Well done!\n',
blink=True,
fg=typer.colors.BRIGHT_GREEN)
pop_up_msg()
remark = typer.confirm("Any remark?")
if remark:
remark = typer.prompt('Enter your remarks.')
else:
remark = None
stop(remarks=remark)
typer.Exit()
else:
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStart task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
else:
typer.secho(f'\nTask already done\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def stop(remarks: str = typer.Option(None, '--remarks', '-r')):
"""Stop Timer."""
with Session(engine) as session:
try:
query_timer = session.exec(
select(Timer).where(Timer.end == None)).one()
query_timer.end = datetime.utcnow()
query_timer.duration = query_timer.end - query_timer.start
session.add(query_timer)
query = session.get(ToDo, query_timer.id_todo)
check = typer.confirm('Is the task done?')
if not check and not remarks:
pass
else:
if check:
query.status = 'done'
query.date_end = query_timer.end.date()
if remarks:
query.remarks = remarks
session.add(query)
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStop task ({query.id}). Timer id: {new_id}\n',
fg=typer.colors.GREEN)
except NoResultFound:
typer.secho(f'\nNo task running\n', fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def view(due_date: datetime = typer.Option(datetime.today() +
timedelta(weeks=1),
formats=['%Y-%m-%d'])):
"""Print to-do list view."""
overdue = select(ToDo).where(ToDo.due_date < date.today(),
ToDo.status != 'done').order_by(ToDo.due_date)
reminders = | select(ToDo) | sqlmodel.select |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
                            f'\nDuration must be greater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nTask Start task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
with typer.progressbar(length=total_seconds) as progress:
end = datetime.utcnow() + duration
while datetime.utcnow() < end:
time.sleep(1)
progress.update(1)
else:
typer.secho('\n\nYour Time is over! Well done!\n',
blink=True,
fg=typer.colors.BRIGHT_GREEN)
pop_up_msg()
remark = typer.confirm("Any remark?")
if remark:
remark = typer.prompt('Enter your remarks.')
else:
remark = None
stop(remarks=remark)
typer.Exit()
else:
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStart task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
else:
typer.secho(f'\nTask already done\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def stop(remarks: str = typer.Option(None, '--remarks', '-r')):
"""Stop Timer."""
with Session(engine) as session:
try:
query_timer = session.exec(
select(Timer).where(Timer.end == None)).one()
query_timer.end = datetime.utcnow()
query_timer.duration = query_timer.end - query_timer.start
session.add(query_timer)
query = session.get(ToDo, query_timer.id_todo)
check = typer.confirm('Is the task done?')
if not check and not remarks:
pass
else:
if check:
query.status = 'done'
query.date_end = query_timer.end.date()
if remarks:
query.remarks = remarks
session.add(query)
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStop task ({query.id}). Timer id: {new_id}\n',
fg=typer.colors.GREEN)
except NoResultFound:
typer.secho(f'\nNo task running\n', fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def view(due_date: datetime = typer.Option(datetime.today() +
timedelta(weeks=1),
formats=['%Y-%m-%d'])):
"""Print to-do list view."""
overdue = select(ToDo).where(ToDo.due_date < date.today(),
ToDo.status != 'done').order_by(ToDo.due_date)
reminders = select(ToDo).where(ToDo.reminder <= date.today(),
ToDo.status != 'done').order_by(
ToDo.due_date)
due_in = | select(ToDo) | sqlmodel.select |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
                            f'\nDuration must be greater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nTask Start task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
with typer.progressbar(length=total_seconds) as progress:
end = datetime.utcnow() + duration
while datetime.utcnow() < end:
time.sleep(1)
progress.update(1)
else:
typer.secho('\n\nYour Time is over! Well done!\n',
blink=True,
fg=typer.colors.BRIGHT_GREEN)
pop_up_msg()
remark = typer.confirm("Any remark?")
if remark:
remark = typer.prompt('Enter your remarks.')
else:
remark = None
stop(remarks=remark)
typer.Exit()
else:
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStart task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
else:
typer.secho(f'\nTask already done\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def stop(remarks: str = typer.Option(None, '--remarks', '-r')):
"""Stop Timer."""
with Session(engine) as session:
try:
query_timer = session.exec(
select(Timer).where(Timer.end == None)).one()
query_timer.end = datetime.utcnow()
query_timer.duration = query_timer.end - query_timer.start
session.add(query_timer)
query = session.get(ToDo, query_timer.id_todo)
check = typer.confirm('Is the task done?')
if not check and not remarks:
pass
else:
if check:
query.status = 'done'
query.date_end = query_timer.end.date()
if remarks:
query.remarks = remarks
session.add(query)
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStop task ({query.id}). Timer id: {new_id}\n',
fg=typer.colors.GREEN)
except NoResultFound:
typer.secho(f'\nNo task running\n', fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def view(due_date: datetime = typer.Option(datetime.today() +
timedelta(weeks=1),
formats=['%Y-%m-%d'])):
"""Print to-do list view."""
overdue = select(ToDo).where(ToDo.due_date < date.today(),
ToDo.status != 'done').order_by(ToDo.due_date)
reminders = select(ToDo).where(ToDo.reminder <= date.today(),
ToDo.status != 'done').order_by(
ToDo.due_date)
due_in = select(ToDo).where(
ToDo.due_date < due_date, ToDo.due_date >= date.today(),
ToDo.status != 'done').order_by(ToDo.due_date)
no_due = | select(ToDo) | sqlmodel.select |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select( | func.max(ToDo.id) | sqlmodel.func.max |
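
Both commands above fetch the generated key with select(func.max(...)) straight after the commit. A tiny sketch of the more direct alternative, session.refresh(), on a placeholder table:

from typing import Optional
from sqlmodel import Field, Session, SQLModel, create_engine

class Note(SQLModel, table=True):     # placeholder table for the sketch
    id: Optional[int] = Field(default=None, primary_key=True)
    text: str

engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    note = Note(text="hello")
    session.add(note)
    session.commit()
    session.refresh(note)             # reloads the row, filling in the generated id
    print(note.id)                    # -> 1, no extra MAX(id) query needed
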
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
                            f'\nDuration must be greater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nTask Start task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
with typer.progressbar(length=total_seconds) as progress:
end = datetime.utcnow() + duration
while datetime.utcnow() < end:
time.sleep(1)
progress.update(1)
else:
typer.secho('\n\nYour Time is over! Well done!\n',
blink=True,
fg=typer.colors.BRIGHT_GREEN)
pop_up_msg()
remark = typer.confirm("Any remark?")
if remark:
remark = typer.prompt('Enter your remarks.')
else:
remark = None
stop(remarks=remark)
typer.Exit()
else:
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStart task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
else:
typer.secho(f'\nTask already done\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def stop(remarks: str = typer.Option(None, '--remarks', '-r')):
"""Stop Timer."""
with Session(engine) as session:
try:
query_timer = session.exec(
select(Timer).where(Timer.end == None)).one()
query_timer.end = datetime.utcnow()
query_timer.duration = query_timer.end - query_timer.start
session.add(query_timer)
query = session.get(ToDo, query_timer.id_todo)
check = typer.confirm('Is the task done?')
if not check and not remarks:
pass
else:
if check:
query.status = 'done'
query.date_end = query_timer.end.date()
if remarks:
query.remarks = remarks
session.add(query)
session.commit()
new_id = session.exec(select( | func.max(Timer.id) | sqlmodel.func.max |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec( | select(Timer) | sqlmodel.select |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
                            f'\nDuration must be greater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nTask Start task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
with typer.progressbar(length=total_seconds) as progress:
end = datetime.utcnow() + duration
while datetime.utcnow() < end:
time.sleep(1)
progress.update(1)
else:
typer.secho('\n\nYour Time is over! Well done!\n',
blink=True,
fg=typer.colors.BRIGHT_GREEN)
pop_up_msg()
remark = typer.confirm("Any remark?")
if remark:
remark = typer.prompt('Enter your remarks.')
else:
remark = None
stop(remarks=remark)
typer.Exit()
else:
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStart task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
else:
typer.secho(f'\nTask already done\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except AttributeError:
typer.secho(f'\nInvalid task id\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
@app.command()
def stop(remarks: str = typer.Option(None, '--remarks', '-r')):
"""Stop Timer."""
with Session(engine) as session:
try:
query_timer = session.exec(
| select(Timer) | sqlmodel.select |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
            typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec( | select(ToDo) | sqlmodel.select |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
            typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
f'\nDuration must be greater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select( | func.max(Timer.id) | sqlmodel.func.max |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec(select(ToDo).where(
ToDo.project == project,
ToDo.status != 'done')).first()
if ongoing_project is None:
typer.secho(f'\nTasks already done in the project\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
new_entry = ToDo(task=task, project=project,
due_date=due_date, reminder=reminder,
status=status, tag=tag)
session.add(new_entry)
session.commit()
new_id = session.exec(select(func.max(ToDo.id))).one()
typer.secho(f'Add {task}. Task id: {new_id}\n',
fg=typer.colors.GREEN)
except OperationalError:
create_db_and_tables()
add(task=task, project=project, due_date=due_date, reminder=reminder,
status=status, tag=tag)
@app.command()
def start(task_id: int, duration: int = typer.Option(None, '--duration', '-d',
help='Duration in minutes')):
"""Start Timer for a given open task."""
with Session(engine) as session:
try:
session.exec(select(Timer).where(Timer.end == None)).one()
typer.secho('\nThe Timer must be stopped first\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
except NoResultFound:
pass
try:
query = session.get(ToDo, task_id)
if not query.status == 'done':
if query.status == 'to do':
query.status = 'doing'
session.add(query)
if duration is not None:
duration = timedelta(minutes=duration)
if duration <= timedelta(minutes=0):
typer.secho(
f'\nDuration must be greater than 0\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
total_seconds = int(duration.total_seconds())
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select(func.max(Timer.id))).one()
typer.secho(
f'\nStart task {task_id}. Timer id: {new_id}\n',
fg=typer.colors.GREEN)
with typer.progressbar(length=total_seconds) as progress:
end = datetime.utcnow() + duration
while datetime.utcnow() < end:
time.sleep(1)
progress.update(1)
else:
typer.secho('\n\nYour Time is over! Well done!\n',
blink=True,
fg=typer.colors.BRIGHT_GREEN)
pop_up_msg()
remark = typer.confirm("Any remark?")
if remark:
remark = typer.prompt('Enter your remarks.')
else:
remark = None
stop(remarks=remark)
raise typer.Exit()
else:
session.add(Timer(id_todo=task_id))
session.commit()
new_id = session.exec(select( | func.max(Timer.id) | sqlmodel.func.max |
import time
from datetime import datetime, timedelta, date
import typer
from sqlalchemy.exc import NoResultFound, OperationalError
from sqlmodel import Session, select, func
from tabulate import tabulate
from . import edit
from . import reports
from .database import create_db_and_tables, engine
from .functions_aux import Status, make_table_view, pop_up_msg
from .tables import ToDo, Timer
app = typer.Typer()
app.add_typer(reports.app, name='report', help='Print customized reports.')
app.add_typer(edit.app, name='edit', help='Edit records.')
@app.command()
def add(task: str, project: str = typer.Option(None, '--project', '-p'),
due_date: datetime = typer.Option(None, '--due-date', '-d',
formats=['%Y-%m-%d']),
reminder: datetime = typer.Option(None, '--reminder', '-r',
formats=['%Y-%m-%d']),
status: Status = typer.Option(Status.to_do, '--status', '-s'),
tag: str = typer.Option(None, '--tag', '-t')):
"""Add task to the to-do list."""
try:
today = datetime.today()
if due_date is not None and due_date <= today:
typer.secho(f'\ndue date must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if reminder is not None and reminder <= today:
typer.secho(f'\nreminder must be greater than {today.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
if due_date is not None and reminder is not None and \
reminder >= due_date:
typer.secho(f'\nreminder must be smaller than {due_date.date()}\n',
fg=typer.colors.RED)
raise typer.Exit(code=1)
with Session(engine) as session:
if project is not None:
new_project = session.exec(select(ToDo).where(
ToDo.project == project)).first()
if new_project is not None:
ongoing_project = session.exec( | select(ToDo) | sqlmodel.select |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", | sqlmodel.sql.sqltypes.GUID() | sqlmodel.sql.sqltypes.GUID |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", | sqlmodel.sql.sqltypes.GUID() | sqlmodel.sql.sqltypes.GUID |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", | sqlmodel.sql.sqltypes.GUID() | sqlmodel.sql.sqltypes.GUID |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(
op.f("ix_job_applicant_name"), "job_applicant", ["name"], unique=False
)
op.create_table(
"line_of_business",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(
op.f("ix_job_applicant_name"), "job_applicant", ["name"], unique=False
)
op.create_table(
"line_of_business",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("webhook_url", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(
op.f("ix_job_applicant_name"), "job_applicant", ["name"], unique=False
)
op.create_table(
"line_of_business",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("webhook_url", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("sandbox_id", | sqlmodel.sql.sqltypes.GUID() | sqlmodel.sql.sqltypes.GUID |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(
op.f("ix_job_applicant_name"), "job_applicant", ["name"], unique=False
)
op.create_table(
"line_of_business",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("webhook_url", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("traction_issue_enabled", sa.Boolean(), nullable=False),
sa.Column("public_did", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |
"""initial
Revision ID: ef<PASSWORD>
Revises:
Create Date: 2022-03-03 15:21:30.974400
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "ef<PASSWORD>"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"sandbox",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("tag", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"job_applicant",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("degree", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("date", sa.DateTime(), nullable=True),
sa.Column(
"invitation_state", sqlmodel.sql.sqltypes.AutoString(), nullable=True
),
sa.Column("connection_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("wallet_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
sa.Column("alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.ForeignKeyConstraint(
["sandbox_id"],
["sandbox.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "sandbox_id"),
)
op.create_index(
op.f("ix_job_applicant_name"), "job_applicant", ["name"], unique=False
)
op.create_table(
"line_of_business",
sa.Column(
"id",
postgresql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
nullable=False,
),
sa.Column(
"created_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
postgresql.TIMESTAMP(),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("webhook_url", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("sandbox_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("traction_issue_enabled", sa.Boolean(), nullable=False),
sa.Column("public_did", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column("cred_def_id", | sqlmodel.sql.sqltypes.AutoString() | sqlmodel.sql.sqltypes.AutoString |