Make API for PortsWatch

This commit is contained in:
Dryusdan 2024-01-26 23:42:41 +01:00
parent 9c95546d5e
commit 65745a0498
Signed by: Dryusdan
GPG key ID: EC1438DDE24E27D7
21 changed files with 2095 additions and 0 deletions

1
.gitignore vendored
View file

@ -160,3 +160,4 @@ cython_debug/
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
portswatch.db

116
alembic.ini Normal file
View file

@ -0,0 +1,116 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1
migrations/README Normal file
View file

@ -0,0 +1 @@
Generic single-database configuration.

101
migrations/env.py Normal file
View file

@ -0,0 +1,101 @@
from logging.config import fileConfig
from pathlib import Path

from alembic import context
from sqlalchemy import engine_from_config, pool

from portswatch.classes.logs import Log
from portswatch.classes.settings import Settings
from portswatch.database import Base
from portswatch.schemas import *  # noqa: F403 -- import every model so autogenerate sees all tables

# Load PortsWatch settings
settings = Settings()
log = Log(name=__name__)

# Build the SQLAlchemy URL from the application settings rather than from
# alembic.ini, so migrations always target the configured database.
if settings.db_type == "sqlite":
    # sqlite URLs use three slashes before the file path.
    SQLALCHEMY_DATABASE_URL = f"{settings.db_type}:///{settings.db_path}"
    sqlite_path = Path(settings.db_path)
    # if not sqlite_path.exists():
    #    log.info(settings.db_path)
    #    db = sqlite3.connect(SQLALCHEMY_DATABASE_URL)
    #:    db.close()
else:
    SQLALCHEMY_DATABASE_URL = (
        f"{settings.db_type}://"
        f"{settings.db_user}:{settings.db_pass}"
        f"@{settings.db_host}:{settings.db_port}"
        f"/{settings.db_name}"
    )

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option("sqlalchemy.url", SQLALCHEMY_DATABASE_URL)

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here for 'autogenerate' support
# target_metadata = [IP.metadata, Ports.metadata]
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with only the database URL -- no Engine and
    therefore no DBAPI is required.  context.execute() calls emit the
    generated SQL to the script output instead of a live connection.
    """
    offline_options = {
        "url": config.get_main_option("sqlalchemy.url"),
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**offline_options)

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] config section and applies the
    migrations over a live database connection.
    """
    section = config.get_section(config.config_ini_section, {})
    engine = engine_from_config(
        section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()


# Entry point: Alembic selects the mode before importing this module.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

26
migrations/script.py.mako Normal file
View file

@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View file

@ -0,0 +1,43 @@
"""Create base tables
Revision ID: 6b67f20ed0d4
Revises:
Create Date: 2024-01-26 20:40:16.322911
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '6b67f20ed0d4'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial schema: the `ips` and `ports` tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    # IDs are UUID4 values stored as raw 16-byte BINARY; IP addresses are
    # stored packed (4 bytes for IPv4, 16 for IPv6), hence VARBINARY(16).
    op.create_table('ips',
        sa.Column('id', sa.BINARY(length=16), nullable=False),
        sa.Column('ip', sa.VARBINARY(length=16), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('ip')
    )
    op.create_table('ports',
        sa.Column('id', sa.BINARY(length=16), nullable=False),
        sa.Column('port', sa.Integer(), nullable=True),
        sa.Column('ip_id', sa.BINARY(length=16), nullable=True),
        sa.ForeignKeyConstraint(['ip_id'], ['ips.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop both tables; `ports` first because of its FK on `ips`."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('ports')
    op.drop_table('ips')
    # ### end Alembic commands ###

1170
poetry.lock generated Normal file

File diff suppressed because it is too large Load diff

182
portswatch/api.py Normal file
View file

@ -0,0 +1,182 @@
from contextlib import asynccontextmanager
from typing import Annotated

import uvicorn
from fastapi import FastAPI, Path, status
from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse, Response

from portswatch.classes.logs import Log
from portswatch.classes.settings import Settings
from portswatch.controllers.ip import IPController
from portswatch.cruds.ip import IPCrud
from portswatch.cruds.port import PortCrud
from portswatch.models.ip import IPcreateModel, IPqueryModel
from portswatch.models.ports import PortsbaseModel, PortscreateModel, PortsqueryModel
settings = Settings()
log = Log(name=__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup: route uvicorn's access logger through the project Log
    # factory (the assignment's value is unused; calling Log configures
    # logging as a side effect).  Nothing to tear down on shutdown.
    logger = Log.uvicorn_log()
    yield


app = FastAPI(lifespan=lifespan)
@app.get("/")
async def read_root():
    """Trivial root endpoint, usable as a liveness check."""
    greeting = {"Hello": "World"}
    return greeting
@app.post("/ip", status_code=status.HTTP_201_CREATED)
async def post_ip(ip: IPcreateModel | IPqueryModel):
    """Register a new IP address.

    Returns 422 when the instance policy rejects the address type,
    409 when it already exists, 400 when storage fails, 201 on success.
    """
    if not IPController(ip).is_ip_authorized():
        message = {
            "message": (
                f"{ip.ip} is not authorized by instance policy. "
                f"Instance authorize this type of IP: "
                f"{' | '.join(settings.authorized_ips_type)}"
            )
        }
        return JSONResponse(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, content=message
        )
    if IPCrud().get_ip(ip):
        message = {"message": (f"{ip.ip} already exist. ")}
        return JSONResponse(status_code=status.HTTP_409_CONFLICT, content=message)
    ip_created = IPCrud().add(ip)
    if not ip_created:
        message = {"message": f"{ip.ip} isn't add into PortsWatch."}
        return JSONResponse(status_code=status.HTTP_400_BAD_REQUEST, content=message)
    # BUG FIX: a bare JSONResponse defaults to 200 and overrides the 201
    # declared on the decorator; set the status explicitly.
    return JSONResponse(content={}, status_code=status.HTTP_201_CREATED)
@app.get("/ip/{ip}", status_code=status.HTTP_200_OK)
async def read_ip(ip: Annotated[str, Path(title="The IP to get")]):
    """Return one IP together with the list of its known port numbers."""
    # BUG FIX: is_ip_global() returns an error JSONResponse (400/404) or
    # None; the original discarded it, so an invalid or unknown IP fell
    # through and crashed below on `.ports` (AttributeError -> 500).
    error = is_ip_global(ip)
    if error is not None:
        return error
    ip_model = IPqueryModel(ip=ip)
    ip_data = IPCrud().get_ip(ip_model)
    # Flatten the related Ports rows to bare port numbers for the payload.
    ip_data.ports = [port.port for port in ip_data.ports]
    message = jsonable_encoder(ip_data.dict())
    return JSONResponse(content=message)
@app.post("/ip/{ip}/port", status_code=status.HTTP_201_CREATED)
async def write_port(
    ip: Annotated[str, Path(title="The IP attached")],
    ports: PortsbaseModel | PortsqueryModel,
):
    """Attach a port to an existing IP.

    Returns 409 when the port already exists for that IP, 400 when
    storage fails, 201 on success.
    """
    # BUG FIX: return the error response produced by is_ip_global();
    # the original discarded it, so invalid/unknown IPs were not rejected.
    error = is_ip_global(ip)
    if error is not None:
        return error
    ip_model = IPqueryModel(ip=ip)
    ip_data = IPCrud().get_ip(ip_model)
    if not ip_data:
        message = {"message": "Curious moment. IP is valid but ID is not"}
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content=message
        )
    port = PortscreateModel(port=ports.port, ip_id=ip_data.id)
    port_crud = PortCrud().get_port(port)
    if port_crud:
        message = {"message": f"Port {ports.port} already exist for {ip}."}
        return JSONResponse(status_code=status.HTTP_409_CONFLICT, content=message)
    port_crud = PortCrud().add(port)
    if not port_crud:
        message = {
            "message": f"Port {ports.port} (for ip {ip}) isn't add into PortsWatch."
        }
        return JSONResponse(status_code=status.HTTP_400_BAD_REQUEST, content=message)
    # BUG FIX: a bare JSONResponse defaults to 200, overriding the 201
    # declared on the decorator.
    return JSONResponse(content={}, status_code=status.HTTP_201_CREATED)
@app.get("/ip/{ip}/port/{port}", status_code=status.HTTP_200_OK)
async def read_port(
    ip: Annotated[str, Path(title="The IP to get")],
    port: Annotated[int, Path(title="The port")],
):
    """Return one port attached to an IP, or 404 when it is not stored."""
    # BUG FIX: return the error response produced by is_ip_global();
    # the original discarded it, so invalid/unknown IPs were not rejected.
    error = is_ip_global(ip)
    if error is not None:
        return error
    ip_model = IPqueryModel(ip=ip)
    ip_data = IPCrud().get_ip(ip_model)
    if not ip_data:
        message = {"message": "Curious moment. IP is valid but ID is not"}
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content=message
        )
    port_model = PortsqueryModel(port=port, ip_id=ip_data.id)
    port_crud = PortCrud().get_port(port_model)
    if not port_crud:
        message = {"message": f"Port {port_model.port} not for {ip}."}
        return JSONResponse(status_code=status.HTTP_404_NOT_FOUND, content=message)
    return JSONResponse(content=jsonable_encoder(port_crud.dict()))
@app.delete("/ip/{ip}/port/{port}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_port(
    ip: Annotated[str, Path(title="The IP to get")],
    port: Annotated[int, Path(title="The port")],
):
    """Delete one port attached to an IP.

    Returns 404 when the port is not stored, 500 when deletion fails,
    204 (empty body) on success.
    """
    # BUG FIX: return the error response produced by is_ip_global();
    # the original discarded it, so invalid/unknown IPs were not rejected.
    error = is_ip_global(ip)
    if error is not None:
        return error
    ip_model = IPqueryModel(ip=ip)
    ip_data = IPCrud().get_ip(ip_model)
    if not ip_data:
        message = {"message": "Curious moment. IP is valid but ID is not"}
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content=message
        )
    port_model = PortsqueryModel(port=port, ip_id=ip_data.id)
    port_crud = PortCrud().get_port(port_model)
    # removed leftover debug print(port_crud)
    if not port_crud:
        message = {"message": f"Port {port_model.port} not for {ip}."}
        return JSONResponse(status_code=status.HTTP_404_NOT_FOUND, content=message)
    if not PortCrud().delete(port_crud):
        # typo fix: "Cant't" -> "Can't"
        message = {"message": f"Can't delete {port_model.port} for {ip}."}
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, content=message
        )
    # BUG FIX: JSONResponse(content={}) replied 200 with a body on a route
    # declared 204; a 204 must be an empty Response.
    return Response(status_code=status.HTTP_204_NO_CONTENT)
@app.get("/ips", status_code=status.HTTP_200_OK)
async def read_ips():
    """List every stored IP with its port numbers."""
    message = {"ips": []}
    for ip in IPCrud().get_ips():
        # removed leftover debug print(ip)
        # Flatten the related Ports rows to bare port numbers.
        ip.ports = [port.port for port in ip.ports]
        message["ips"].append(jsonable_encoder(ip.dict()))
    return JSONResponse(content=message)
def is_ip_global(ip_query: str):
    """Validate *ip_query* and check that it exists in the database.

    Returns a JSONResponse describing the problem (400 for a malformed
    address, 404 when the IP is not stored) or None when the IP is valid
    and present.  NOTE(review): callers must return this value when it is
    not None -- a bare call silently discards the error response.
    """
    check_ip_bool, check_ip_message = IPController.is_ip(ip_query)
    if not check_ip_bool:
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST, content=check_ip_message
        )
    ip_model = IPqueryModel(ip=ip_query)
    ip = IPCrud().get_ip(ip_model)
    if not ip:
        message = {"message": f"{ip_model.ip.compressed} not found"}
        return JSONResponse(status_code=status.HTTP_404_NOT_FOUND, content=message)
# Development entry point: serve the app directly with uvicorn.
# log_config=None keeps uvicorn from overriding the Log/Rich setup
# installed in lifespan(); reload follows the debug setting.
if __name__ == "__main__":
    uvicorn.run(
        "api:app",
        host=settings.host,
        port=settings.port,
        reload=settings.debug,
        log_config=None,
    )

18
portswatch/classes/ips.py Normal file
View file

@ -0,0 +1,18 @@
import pprint
from pydantic.networks import IPvAnyAddress
from portswatch.classes.logs import Log
from portswatch.classes.settings import Settings
settings = Settings()
log = Log(name=__name__)
class IPs:
    """Work-in-progress wrapper around a single IP address."""

    def __init__(
        self,
        ip: IPvAnyAddress,
    ):
        # Keep the address for later use; replaces the leftover debug
        # pprint.pprint(ip) call, which only dumped the value to stdout.
        self.ip = ip

View file

@ -0,0 +1,34 @@
import logging
from rich.logging import RichHandler
from portswatch.classes.settings import Settings
settings = Settings()
class Log:
    """Logger factory: configures the root logger with a RichHandler and
    returns a stdlib logger for *name* (Log(...) yields a logging.Logger,
    not a Log instance)."""

    def __new__(cls, name: str = "default", level: str = None):
        level = Log.get_level(level)
        # basicConfig() returns None, so the original
        # `log = logging.basicConfig(...)` assignment was dead code.
        logging.basicConfig(
            level=level,
            format="%(message)s %(module)s %(funcName)s",
            handlers=[RichHandler(rich_tracebacks=True)],
        )
        return logging.getLogger(name=name)

    @staticmethod
    def uvicorn_log(level: str = None):
        """Return the logger uvicorn uses for access logs."""
        return Log(name="uvicorn.access", level=level)

    @staticmethod
    def get_level(level: str = None):
        """Resolve a logging level name, defaulting to settings then "INFO".

        BUG FIX: the original unconditionally overwrote an explicitly
        passed *level* with settings.log_level; the explicit argument now
        takes precedence and settings only provide the fallback.
        """
        log_level_list = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
        if level is None and hasattr(settings, "log_level"):
            level = settings.log_level
        level = level.upper() if level else "INFO"
        if level not in log_level_list:
            level = "INFO"
        return level

View file

@ -0,0 +1,45 @@
import logging
from typing import Optional, Tuple, Type
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource
logger = logging.getLogger(__name__)
class ConfigVars:
    """pydantic-settings configuration: read overrides from a UTF-8 .env file."""

    env_file = ".env"
    env_file_encoding = "utf-8"

    @classmethod
    def customise_sources(
        cls,
        settings_cls: Type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
    ) -> Tuple[PydanticBaseSettingsSource, ...]:
        # Precedence (highest first): constructor kwargs, then process
        # environment variables, then the .env file.
        return (
            init_settings,
            env_settings,
            dotenv_settings,
        )
class Settings(BaseSettings):
    """Application settings, overridable via environment variables or .env."""

    # API server.
    debug: bool = False  # also drives uvicorn's auto-reload in api.py
    host: str = "127.0.0.1"
    port: int = 15789
    log_level: str = "INFO"
    # Database: either sqlite (uses db_path) or a server database
    # (uses db_host/db_port/db_name/db_user/db_pass).
    db_type: str = "postgresql"
    db_host: Optional[str] = None
    db_path: Optional[str] = None
    db_name: Optional[str] = None
    db_user: Optional[str] = None
    db_pass: Optional[str] = None
    db_port: Optional[int] = None
    # IP categories the instance accepts (interpreted by IPController).
    authorized_ips_type: list = ["public"]

    class Config(ConfigVars):
        """Config."""

        pass

View file

@ -0,0 +1,51 @@
import ipaddress
from typing import Any
from portswatch.classes.logs import Log
from portswatch.classes.settings import Settings
from portswatch.models.ip import IPcreateModel, IPModel
settings = Settings()
log = Log(name=__name__)
class IPController:
    """Policy and validity checks for a single IP address."""

    # Maps the policy names accepted in settings.authorized_ips_type to
    # the corresponding ipaddress.IPv4Address/IPv6Address boolean
    # properties (https://docs.python.org/3/library/ipaddress.html).
    _TYPE_ATTRS = {
        "multicast": "is_multicast",
        "private": "is_private",
        "public": "is_global",
        "unspecified": "is_unspecified",
        "reserved": "is_reserved",
        "loopback": "is_loopback",
        "link_local": "is_link_local",
    }

    def __init__(self, ip: IPModel | IPcreateModel):
        self.ip = ip.ip

    def is_ip_authorized(self):
        """Return True when the IP matches any authorized type; unknown
        policy names are ignored, just like the unmatched match/case arms."""
        for ips_type in settings.authorized_ips_type:
            attr_name = self._TYPE_ATTRS.get(ips_type)
            if attr_name is not None and getattr(self.ip, attr_name):
                return True
        return False

    @staticmethod
    def is_ip(ip: Any):
        """Return (True, None) if *ip* parses as an IP address, else (False, reason)."""
        try:
            ipaddress.ip_address(ip)
        except ValueError as err:
            return False, f"{err}"
        return True, None

Binary file not shown.

56
portswatch/cruds/ip.py Normal file
View file

@ -0,0 +1,56 @@
import sqlite3
from uuid import UUID, uuid4
from sqlalchemy import exc
from portswatch.classes.logs import Log
from portswatch.database import SessionLocal
from portswatch.models.ip import IPcreateModel, IPModel, IPqueryModel
from portswatch.schemas import IP
log = Log(name=__name__)
class IPCrud:
    """CRUD operations for the `ips` table (one short-lived session per call)."""

    def add(self, ip: IPcreateModel):
        """Insert *ip* (packed bytes, UUID4 key); True on success, else False.

        BUG FIX: the original committed outside the try block, so
        IntegrityError/SQLAlchemyError raised at flush/commit time (where
        constraint violations actually surface -- session.add() itself
        rarely raises) escaped the error handling entirely.
        """
        ip_schema = IP(id=uuid4().bytes, ip=ip.ip.packed)
        with SessionLocal() as session:
            try:
                session.add(ip_schema)
                session.commit()
            # SQLAlchemy wraps DBAPI errors, but keep the raw sqlite3
            # catch for parity with the original handler.
            except (exc.SQLAlchemyError, sqlite3.IntegrityError) as e:
                log.debug(
                    f"Portswatch encounter an error when it try to add {ip.ip} : {e}"
                )
                session.rollback()
                return False
            return True

    def get_ip(self, ip: IPqueryModel):
        """Return the IPModel matching *ip* (lookup by packed bytes), or None."""
        with SessionLocal() as session:
            ip_query = session.query(IP).filter(IP.ip == ip.ip.packed).first()
            if ip_query:
                return IPModel(id=ip_query.id, ip=ip_query.ip, ports=ip_query.ports)
            return None

    def get_ip_by_id(self, id: UUID):
        """Return the IPModel with primary key *id*, or None."""
        with SessionLocal() as session:
            ip = session.query(IP).filter(IP.id == id.bytes).first()
            if ip:
                return IPModel(id=ip.id, ip=ip.ip)
            return None

    def get_ips(self):
        """Return every stored IP as IPModel instances (ports included)."""
        with SessionLocal() as session:
            return [
                IPModel(id=ip.id, ip=ip.ip, ports=ip.ports)
                for ip in session.query(IP).all()
            ]

73
portswatch/cruds/port.py Normal file
View file

@ -0,0 +1,73 @@
import sqlite3
from uuid import uuid4
from sqlalchemy import exc
from portswatch.classes.logs import Log
from portswatch.database import SessionLocal
from portswatch.models.ports import PortscreateModel, PortsModel, PortsqueryModel
from portswatch.schemas import Ports
log = Log(name=__name__)
class PortCrud:
    """CRUD operations for the `ports` table (one short-lived session per call)."""

    def add(self, port: PortscreateModel):
        """Insert *port* for its IP; True on success, else False.

        BUG FIX: commit moved inside the try block -- constraint
        violations surface at flush/commit, not at session.add(), so the
        original handler never caught them.
        """
        port_schema = Ports(id=uuid4().bytes, ip_id=port.ip_id.bytes, port=port.port)
        with SessionLocal() as session:
            try:
                session.add(port_schema)
                session.commit()
            except (exc.SQLAlchemyError, sqlite3.IntegrityError) as e:
                log.debug(
                    f"Portswatch encounter an error when it try to add {port.port}: {e}"
                )
                session.rollback()
                return False
            return True

    def get_port(self, port: PortsqueryModel | PortscreateModel):
        """Return the PortsModel matching (port number, ip_id), or None."""
        with SessionLocal() as session:
            port_query = (
                session.query(Ports)
                .filter(Ports.port == port.port, Ports.ip_id == port.ip_id.bytes)
                .first()
            )
            if port_query:
                return PortsModel(id=port_query.id, port=port_query.port)
            return None

    def delete(self, port: PortsModel):
        """Delete the row matching *port*; True on success, else False.

        BUG FIX: the original loaded the row in one session, closed it,
        then deleted the now-detached instance through a second session;
        query and delete now share a single session (and the error log no
        longer says "add").
        """
        with SessionLocal() as session:
            port_query = (
                session.query(Ports)
                .filter(Ports.port == port.port, Ports.id == port.id.bytes)
                .first()
            )
            if port_query is None:
                # Row vanished between lookup and delete.
                return False
            try:
                session.delete(port_query)
                session.commit()
            except (exc.SQLAlchemyError, sqlite3.IntegrityError) as e:
                log.debug(
                    f"Portswatch encounter an error when it try to delete "
                    f"{port.port}: {e}"
                )
                session.rollback()
                return False
            return True

34
portswatch/database.py Normal file
View file

@ -0,0 +1,34 @@
#!/usr/bin/env python3
"""Database bootstrap: SQLAlchemy engine, session factory and declarative base."""
from sqlalchemy import create_engine
# sqlalchemy.ext.declarative.declarative_base is deprecated in 2.0;
# import it from sqlalchemy.orm instead.
from sqlalchemy.orm import declarative_base, sessionmaker
from sqlalchemy.pool import NullPool

from portswatch.classes.logs import Log
from portswatch.classes.settings import Settings

log = Log(name=__name__)
settings = Settings()

if settings.db_type == "sqlite":
    # sqlite URLs take three slashes before the file path.
    SQLALCHEMY_DATABASE_URL = f"{settings.db_type}:///{settings.db_path}"
else:
    # BUG FIX: the original used ":///" here, producing an invalid
    # "postgresql:///user:pass@host:port/dbname" URL for server
    # databases; "://" matches the working form in migrations/env.py.
    SQLALCHEMY_DATABASE_URL = (
        f"{settings.db_type}://"
        f"{settings.db_user}:{settings.db_pass}"
        f"@{settings.db_host}:{settings.db_port}"
        f"/{settings.db_name}"
    )

engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    echo=False,
    poolclass=NullPool,
)
# expire_on_commit=False keeps loaded attributes readable after commit,
# which the CRUD layer relies on when building models from closed sessions.
SessionLocal = sessionmaker(
    autocommit=False, autoflush=False, expire_on_commit=False, bind=engine
)
Base = declarative_base()

Binary file not shown.

27
portswatch/models/ip.py Normal file
View file

@ -0,0 +1,27 @@
from ipaddress import IPv4Address, IPv6Address

from pydantic import UUID4, BaseModel


class IPbaseModel(BaseModel):
    """Shared base: a single IPv4 or IPv6 address (pydantic parses strings)."""

    ip: IPv4Address | IPv6Address


class IPcreateModel(IPbaseModel):
    """Payload for registering an IP (address only)."""

    pass


class IPqueryModel(IPbaseModel):
    """Payload for looking an IP up (address only)."""

    pass


class IPModel(IPbaseModel):
    """Full representation: primary key plus associated ports."""

    id: UUID4
    ports: list = []

    class Config:
        # Allow construction directly from ORM rows.
        from_attributes = True


class IPbyIDqueryModel(BaseModel):
    """Look an IP up by primary key."""

    id: UUID4

View file

@ -0,0 +1,24 @@
from pydantic import UUID4, BaseModel


class PortsbaseModel(BaseModel):
    """Shared base: a bare port number."""

    port: int


class PortscreateModel(PortsbaseModel):
    """Payload for creating a port attached to an IP."""

    ip_id: UUID4


class PortsqueryModel(PortsbaseModel):
    """Payload for looking a port up for a given IP."""

    ip_id: UUID4


class PortsModel(PortsbaseModel):
    """Full representation of a stored port row."""

    id: UUID4

    class Config:
        # Allow construction directly from ORM rows.
        from_attributes = True


class PortsbyIDqueryModel(BaseModel):
    """Look a port up by primary key."""

    id: UUID4

22
portswatch/schemas.py Normal file
View file

@ -0,0 +1,22 @@
from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.orm import mapped_column, relationship
from sqlalchemy.types import BINARY, VARBINARY

from portswatch.database import Base


class IP(Base):
    """An IP address stored packed (4 bytes IPv4 / 16 bytes IPv6)."""

    __tablename__ = "ips"
    # UUID4 primary key stored as raw 16 bytes.
    id = mapped_column(BINARY(16), primary_key=True)
    # Consistency fix: use mapped_column throughout instead of mixing it
    # with the legacy Column construct (same generated DDL).
    ip = mapped_column(VARBINARY(16), unique=True)
    ports = relationship("Ports", back_populates="ip")


class Ports(Base):
    """A single port number attached to an IP row."""

    __tablename__ = "ports"
    id = mapped_column(BINARY(16), primary_key=True)
    port = mapped_column(Integer())
    ip_id = mapped_column(ForeignKey("ips.id"))
    ip = relationship("IP", back_populates="ports")

71
pyproject.toml Normal file
View file

@ -0,0 +1,71 @@
[tool.poetry]
name = "portswatch"
version = "0.1.0"
description = "A port monitoring tool to detect all opened ports and which ports can be opened"
authors = ["Dryusdan <contact@dryusdan.fr>"]
license = "GPL-3"
readme = "README.md"
packages = [{include = "portswatch"}]
[tool.poetry.scripts]
portwatch = "portswatch.app:app"
[tool.poetry.dependencies]
python = "^3.11"
fastapi = "^0.109.0"
uvicorn = {extras = ["standard"], version = "^0.27.0"}
pydantic-settings = "^2.1.0"
rich = "^13.7.0"
sqlalchemy = "^2.0.25"
alembic = "^1.13.1"
[tool.poetry.group.dev.dependencies]
ruff = "^0.1.14"
isort = "^5.13.2"
[tool.poetry.group.test.dependencies]
pytest = "^7.4.4"
pytest-mock = "^3.12.0"
pytest-cov = "^4.1.0"
coverage = {extras = ["toml"], version = "^7.4.0"}
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.ruff]
exclude = [
"migrations",
]
[tool.ruff.lint]
select = [
# pycodestyle
"E",
# Pyflakes
"F",
# pyupgrade
"UP",
# flake8-bugbear
"B",
# flake8-simplify
"SIM",
# isort
"I",
]
[tool.ruff.format]
docstring-code-format = true
[tool.coverage.run]
omit = [
"tests/*",
]
[tool.coverage.report]
fail_under = 90
[tool.isort]
skip_glob = ["migrations/*"]
skip = [".gitignore", ".dockerignore", ".env", "env.py"]