Compare commits

...

11 Commits

Author SHA1 Message Date
Dominik Madarász 20140f6f31 Merge branch 'v2' of https://git.sr.ht/~tsileo/microblog.pub 2023-08-01 21:33:22 +00:00
Thomas Sileo 9c8693ea55 Quick hotfix for retries 2023-07-14 17:50:26 +02:00
Thomas Sileo febd8c3d26 Upgrade deps 2023-07-03 20:36:24 +02:00
Thomas Sileo a5290af5c8 Fix proxy by forwarding content-encoding 2023-07-03 20:29:10 +02:00
Thomas Sileo 2cec800332 Fix for pruned Move objects 2023-07-03 20:25:03 +02:00
Thomas Sileo 3c07494809 Make CSRF expiration configurable and increase default value 2023-06-09 22:22:37 +02:00
Thomas Sileo 2433fa01cd Fix typing 2023-06-09 22:22:12 +02:00
Thomas Sileo 3169890a39 Update deps 2023-06-09 21:58:23 +02:00
Thomas Sileo 4e1bb330aa Fix OAuth introspection endpoint 2023-02-03 08:55:31 +01:00
Thomas Sileo 625f399309 Fix OAuth introspection endpoint 2023-02-03 08:32:50 +01:00
Thomas Sileo 2bd6c98538 Add OAuth 2.0 introspection endpoint 2023-02-01 20:12:53 +01:00
11 changed files with 1850 additions and 1682 deletions

View File

@@ -126,6 +126,7 @@ class Config(pydantic.BaseModel):
     key_path: str | None = None
     session_timeout: int = 3600 * 24 * 3  # in seconds, 3 days by default
+    csrf_token_exp: int = 3600
     disabled_notifications: list[str] = []
@@ -265,7 +266,7 @@ def verify_csrf_token(
     if redirect_url:
         please_try_again = f'<a href="{redirect_url}">please try again</a>'
     try:
-        csrf_serializer.loads(csrf_token, max_age=1800)
+        csrf_serializer.loads(csrf_token, max_age=CONFIG.csrf_token_exp)
     except (itsdangerous.BadData, itsdangerous.SignatureExpired):
         logger.exception("Failed to verify CSRF token")
         raise HTTPException(
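Together these two hunks make the CSRF token lifetime configurable: a new csrf_token_exp setting (default 3600 seconds) replaces the hard-coded max_age=1800 in verify_csrf_token. A minimal standalone sketch of how itsdangerous enforces such an expiry (the secret key and values below are placeholders, not microblog.pub's actual wiring):

import itsdangerous

serializer = itsdangerous.URLSafeTimedSerializer("placeholder-secret-key")
csrf_token_exp = 3600  # seconds, matching the new default

token = serializer.dumps("csrf")  # the signing timestamp is embedded in the token

try:
    serializer.loads(token, max_age=csrf_token_exp)
except itsdangerous.SignatureExpired:
    print("token is older than csrf_token_exp, reject the submission")
except itsdangerous.BadData:
    print("token is malformed or was tampered with")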

View File

@@ -60,7 +60,7 @@ def _set_next_try(
     if not outgoing_activity.tries:
         raise ValueError("Should never happen")

-    if outgoing_activity.tries == _MAX_RETRIES:
+    if outgoing_activity.tries >= _MAX_RETRIES:
         outgoing_activity.is_errored = True
         outgoing_activity.next_try = None
     else:
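This is the retry hotfix: the terminal check now uses >= instead of ==, so an activity whose tries counter has already gone past _MAX_RETRIES is marked as errored rather than being rescheduled indefinitely. A small sketch of the boundary (the retry cap value here is assumed for illustration):

_MAX_RETRIES = 16  # assumed value, for illustration only

def should_give_up(tries: int) -> bool:
    # The old `tries == _MAX_RETRIES` check returned False once the counter
    # had skipped past the cap, so the activity kept being retried forever.
    return tries >= _MAX_RETRIES

assert should_give_up(15) is False  # still within the retry budget
assert should_give_up(16) is True
assert should_give_up(17) is True   # the case the equality check missed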

View File

@@ -10,6 +10,8 @@ from fastapi import Form
 from fastapi import HTTPException
 from fastapi import Request
 from fastapi.responses import JSONResponse
+from fastapi.security import HTTPBasic
+from fastapi.security import HTTPBasicCredentials
 from loguru import logger
 from pydantic import BaseModel
 from sqlalchemy import select
@@ -26,6 +28,8 @@ from app.redirect import redirect
 from app.utils import indieauth
 from app.utils.datetime import now

+basic_auth = HTTPBasic()
+
 router = APIRouter()
@@ -41,6 +45,7 @@ async def well_known_authorization_server(
         "revocation_endpoint": request.url_for("indieauth_revocation_endpoint"),
         "revocation_endpoint_auth_methods_supported": ["none"],
         "registration_endpoint": request.url_for("oauth_registration_endpoint"),
+        "introspection_endpoint": request.url_for("oauth_introspection_endpoint"),
     }
@@ -378,6 +383,8 @@ async def _check_access_token(
 class AccessTokenInfo:
     scopes: list[str]
     client_id: str | None
+    access_token: str
+    exp: int


 async def verify_access_token(
@@ -409,6 +416,13 @@ async def verify_access_token(
             if access_token.indieauth_authorization_request
             else None
         ),
+        access_token=access_token.access_token,
+        exp=int(
+            (
+                access_token.created_at.replace(tzinfo=timezone.utc)
+                + timedelta(seconds=access_token.expires_in)
+            ).timestamp()
+        ),
     )
@@ -434,6 +448,13 @@ async def check_access_token(
             if access_token.indieauth_authorization_request
             else None
         ),
+        access_token=access_token.access_token,
+        exp=int(
+            (
+                access_token.created_at.replace(tzinfo=timezone.utc)
+                + timedelta(seconds=access_token.expires_in)
+            ).timestamp()
+        ),
     )

     logger.info(
@@ -474,3 +495,58 @@ async def indieauth_revocation_endpoint(
         content={},
         status_code=200,
     )
+
+
+@router.post("/token_introspection")
+async def oauth_introspection_endpoint(
+    request: Request,
+    credentials: HTTPBasicCredentials = Depends(basic_auth),
+    db_session: AsyncSession = Depends(get_db_session),
+    token: str = Form(),
+) -> JSONResponse:
+    registered_client = (
+        await db_session.scalars(
+            select(models.OAuthClient).where(
+                models.OAuthClient.client_id == credentials.username,
+                models.OAuthClient.client_secret == credentials.password,
+            )
+        )
+    ).one_or_none()
+    if not registered_client:
+        raise HTTPException(status_code=401, detail="unauthenticated")
+
+    access_token = (
+        await db_session.scalars(
+            select(models.IndieAuthAccessToken)
+            .where(models.IndieAuthAccessToken.access_token == token)
+            .join(
+                models.IndieAuthAuthorizationRequest,
+                models.IndieAuthAccessToken.indieauth_authorization_request_id
+                == models.IndieAuthAuthorizationRequest.id,
+            )
+            .where(
+                models.IndieAuthAuthorizationRequest.client_id == credentials.username
+            )
+        )
+    ).one_or_none()
+    if not access_token:
+        return JSONResponse(content={"active": False})
+
+    is_token_valid, _ = await _check_access_token(db_session, token)
+    if not is_token_valid:
+        return JSONResponse(content={"active": False})
+
+    return JSONResponse(
+        content={
+            "active": True,
+            "client_id": credentials.username,
+            "scope": access_token.scope,
+            "exp": int(
+                (
+                    access_token.created_at.replace(tzinfo=timezone.utc)
+                    + timedelta(seconds=access_token.expires_in)
+                ).timestamp()
+            ),
+        },
+        status_code=200,
+    )
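The new route is an RFC 7662-style token introspection endpoint: a registered OAuth client authenticates with HTTP Basic (its client_id/client_secret) and posts the token as form data; the response reports whether the token is active and, if so, its scope, client_id and exp (a Unix timestamp). A hedged usage sketch with httpx; the domain, credentials and token below are placeholders:

import httpx

resp = httpx.post(
    "https://example.com/token_introspection",  # placeholder instance domain
    auth=("my-client-id", "my-client-secret"),  # HTTP Basic, checked against the OAuthClient table
    data={"token": "the-access-token-to-check"},  # form field matching `token: str = Form()`
)
resp.raise_for_status()
info = resp.json()
if info["active"]:
    print("scope:", info["scope"], "- expires at:", info["exp"])
else:
    print("token is unknown, expired or revoked")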

View File

@@ -1426,6 +1426,7 @@ async def serve_proxy_media(
     _filter_proxy_resp_headers(
         proxy_resp,
         [
+            "content-encoding",
             "content-length",
             "content-type",
             "content-range",
@@ -1732,7 +1733,7 @@ async def _gen_rss_feed(
         fe.id(outbox_object.url)
         if outbox_object.name is not None:
             fe.title(outbox_object.name)
-        elif not is_rss: # Atom feeds require a title
+        elif not is_rss:  # Atom feeds require a title
            fe.title(outbox_object.url)
         fe.link(href=outbox_object.url)
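The first hunk in this file forwards content-encoding through the media proxy. That matters because the upstream server may deliver the file compressed (for example gzip); if the body is relayed as-is without that header, the client has no way to know it must decompress it. A standalone sketch of the allow-list idea behind this kind of header filtering (the helper name and values are hypothetical, not microblog.pub's actual _filter_proxy_resp_headers):

# Hypothetical stand-in illustrating an allow-list filter for proxied response headers.
FORWARDED_HEADERS = {"content-encoding", "content-length", "content-type", "content-range"}

def filter_proxy_headers(upstream_headers: dict[str, str]) -> dict[str, str]:
    return {
        name: value
        for name, value in upstream_headers.items()
        if name.lower() in FORWARDED_HEADERS
    }

print(filter_proxy_headers(
    {"Content-Type": "image/png", "Content-Encoding": "gzip", "Server": "nginx"}
))
# keeps Content-Type and Content-Encoding, drops Server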

View File

@@ -132,7 +132,7 @@ async def post_micropub_endpoint(
         h = form_data["h"]
         entry_type = f"h-{h}"

-        logger.info(f"Creating {entry_type}")
+        logger.info(f"Creating {entry_type=} with {access_token_info=}")

         if entry_type != "h-entry":
             return JSONResponse(
@@ -150,7 +150,7 @@ async def post_micropub_endpoint(
         else:
             content = form_data["content"]

-        public_id = await send_create(
+        public_id, _ = await send_create(
            db_session,
            "Note",
            content,
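Two small changes here: the log line switches to Python 3.8+ self-documenting f-strings ({name=} renders both the expression and its value), and the call site unpacks the tuple that send_create now returns, keeping only the public id. The f-string form, for example:

entry_type = "h-entry"
print(f"Creating {entry_type=}")  # prints: Creating entry_type='h-entry'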

View File

@@ -1,4 +1,5 @@
 import enum
+from datetime import datetime
 from typing import Any
 from typing import Optional
 from typing import Union
@@ -436,7 +437,7 @@ class OutboxObjectAttachment(Base):
     outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
     upload_id = Column(Integer, ForeignKey("upload.id"), nullable=False)
-    upload = relationship(Upload, uselist=False)
+    upload: Mapped["Upload"] = relationship(Upload, uselist=False)


 class IndieAuthAuthorizationRequest(Base):
@@ -459,7 +460,9 @@ class IndieAuthAccessToken(Base):
     __tablename__ = "indieauth_access_token"

     id = Column(Integer, primary_key=True, index=True)
-    created_at = Column(DateTime(timezone=True), nullable=False, default=now)
+    created_at: Mapped[datetime] = Column(
+        DateTime(timezone=True), nullable=False, default=now
+    )

     # Will be null for personal access tokens
     indieauth_authorization_request_id = Column(
@@ -470,9 +473,9 @@ class IndieAuthAccessToken(Base):
         uselist=False,
     )

-    access_token = Column(String, nullable=False, unique=True, index=True)
+    access_token: Mapped[str] = Column(String, nullable=False, unique=True, index=True)
     refresh_token = Column(String, nullable=True, unique=True, index=True)
-    expires_in = Column(Integer, nullable=False)
+    expires_in: Mapped[int] = Column(Integer, nullable=False)
     scope = Column(String, nullable=False)
     is_revoked = Column(Boolean, nullable=False, default=False)
     was_refreshed = Column(Boolean, nullable=False, default=False, server_default="0")
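The Mapped[...] annotations give the type checker concrete Python types for these columns (datetime, int, str), which is what lets the new exp computation in the IndieAuth code (created_at plus timedelta(seconds=expires_in)) pass mypy. A condensed sketch of the pattern, assuming SQLAlchemy 1.4-style declarative models like the ones in this diff; the class and table name are stand-ins:

from datetime import datetime, timedelta, timezone

from sqlalchemy import Column, DateTime, Integer
from sqlalchemy.orm import Mapped, declarative_base

Base = declarative_base()


class ExampleAccessToken(Base):  # simplified stand-in for IndieAuthAccessToken
    __tablename__ = "example_access_token"

    id = Column(Integer, primary_key=True)
    # Mapped[...] describes what instances carry for the type checker,
    # while Column(...) still describes the database schema.
    created_at: Mapped[datetime] = Column(DateTime(timezone=True), nullable=False)
    expires_in: Mapped[int] = Column(Integer, nullable=False)

    @property
    def exp(self) -> int:
        # Same expiry computation as the IndieAuth changes above.
        return int(
            (
                self.created_at.replace(tzinfo=timezone.utc)
                + timedelta(seconds=self.expires_in)
            ).timestamp()
        )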

View File

@@ -151,7 +151,7 @@ def _set_next_try(
     if not outgoing_activity.tries:
         raise ValueError("Should never happen")

-    if outgoing_activity.tries == _MAX_RETRIES:
+    if outgoing_activity.tries >= _MAX_RETRIES:
         outgoing_activity.is_errored = True
         outgoing_activity.next_try = None
     else:

View File

@@ -102,6 +102,8 @@ async def _prune_old_inbox_objects(
                     models.InboxObject.ap_type.in_(["Note"]),
                 )
             ),
+            # Keep Move object as they are linked to notifications
+            models.InboxObject.ap_type.not_in(["Move"]),
             # Filter by retention days
             models.InboxObject.ap_published_at
             < now() - timedelta(days=INBOX_RETENTION_DAYS),

View File

@@ -51,7 +51,7 @@
     {% elif notif.notification_type.value == "unblock" %}
         {{ notif_actor_action(notif, "was unblocked") }}
         {{ utils.display_actor(notif.actor, actors_metadata) }}
-    {%- elif notif.notification_type.value == "move" %}
+    {%- elif notif.notification_type.value == "move" and notif.inbox_object %}
     {# for move notif, the actor is the target and the inbox object the Move activity #}
     <div class="actor-action">
         <a href="{{ url_for("admin_profile") }}?actor_id={{ notif.inbox_object.actor.ap_id }}">

View File

@@ -60,7 +60,7 @@ async def save_upload(db_session: AsyncSession, f: UploadFile) -> models.Upload:
         destination_image.putdata(original_image.getdata())
         destination_image.save(
             dest_filename,
-            format=_original_image.format,
+            format=_original_image.format,  # type: ignore
         )

         with open(dest_filename, "rb") as dest_f:

poetry.lock (generated, 3425 changed lines): diff suppressed because it is too large.