mirror of
https://gitlab.com/bramw/baserow.git
synced 2025-04-27 06:00:37 +00:00
Resolve "Optionally only serve files to authenticated users"
This commit is contained in:
parent
c2fe98752d
commit
a981e3c288
36 changed files with 1678 additions and 54 deletions
backend
changelog/entries/unreleased/feature
docker-compose.ymldocs/installation
enterprise/backend
src/baserow_enterprise
tests/baserow_enterprise_tests
api/secure_file_serve
secure_file_serve
web-frontend
|
@ -615,7 +615,7 @@ class AttrDict(dict):
|
|||
globals()[key] = value
|
||||
|
||||
|
||||
DEFAULT_FILE_STORAGE = "django.core.files.storage.FileSystemStorage"
|
||||
BASE_FILE_STORAGE = "django.core.files.storage.FileSystemStorage"
|
||||
|
||||
AWS_STORAGE_ENABLED = os.getenv("AWS_STORAGE_BUCKET_NAME", "") != ""
|
||||
GOOGLE_STORAGE_ENABLED = os.getenv("GS_BUCKET_NAME", "") != ""
|
||||
|
@ -633,7 +633,7 @@ if sum(ALL_STORAGE_ENABLED_VARS) > 1:
|
|||
)
|
||||
|
||||
if AWS_STORAGE_ENABLED:
|
||||
DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
|
||||
BASE_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
|
||||
AWS_S3_FILE_OVERWRITE = False
|
||||
set_settings_from_env_if_present(
|
||||
AttrDict(vars()),
|
||||
|
@ -682,7 +682,7 @@ if GOOGLE_STORAGE_ENABLED:
|
|||
# See https://django-storages.readthedocs.io/en/latest/backends/gcloud.html for
|
||||
# details on what these env variables do
|
||||
|
||||
DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage"
|
||||
BASE_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage"
|
||||
GS_FILE_OVERWRITE = False
|
||||
set_settings_from_env_if_present(
|
||||
AttrDict(vars()),
|
||||
|
@ -708,7 +708,7 @@ if GOOGLE_STORAGE_ENABLED:
|
|||
)
|
||||
|
||||
if AZURE_STORAGE_ENABLED:
|
||||
DEFAULT_FILE_STORAGE = "storages.backends.azure_storage.AzureStorage"
|
||||
BASE_FILE_STORAGE = "storages.backends.azure_storage.AzureStorage"
|
||||
AZURE_OVERWRITE_FILES = False
|
||||
set_settings_from_env_if_present(
|
||||
AttrDict(vars()),
|
||||
|
@ -736,6 +736,14 @@ if AZURE_STORAGE_ENABLED:
|
|||
],
|
||||
)
|
||||
|
||||
STORAGES = {
|
||||
"default": {
|
||||
"BACKEND": BASE_FILE_STORAGE,
|
||||
},
|
||||
"staticfiles": {
|
||||
"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
|
||||
},
|
||||
}
|
||||
|
||||
BASEROW_PUBLIC_URL = os.getenv("BASEROW_PUBLIC_URL")
|
||||
if BASEROW_PUBLIC_URL:
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import os
|
||||
import traceback
|
||||
from typing import Any, Callable, List, NamedTuple, Optional, Union
|
||||
from enum import Enum
|
||||
from typing import Any, Callable, List, NamedTuple, Optional, Type, Union
|
||||
|
||||
from celery.schedules import crontab
|
||||
from loguru import logger
|
||||
|
@ -128,3 +129,18 @@ def get_crontab_from_env(env_var_name: str, default_crontab: str) -> crontab:
|
|||
env_var_name, default_crontab
|
||||
).split(" ")
|
||||
return crontab(minute, hour, day_of_week, day_of_month, month_of_year)
|
||||
|
||||
|
||||
def enum_member_by_value(enum: Type[Enum], value: Any) -> Enum:
|
||||
"""
|
||||
Given an enum and a value, returns the enum member that has that value.
|
||||
|
||||
:param enum: The enum to search.
|
||||
:param value: The value to search for.
|
||||
:return: The enum member that has the value.
|
||||
"""
|
||||
|
||||
for e in enum:
|
||||
if e.value == value:
|
||||
return e
|
||||
raise ValueError(f"No enum member with value {value}")
|
||||
|
|
|
@ -21,10 +21,7 @@ class HerokuExternalFileStorageConfiguredHealthCheck(BaseHealthCheckBackend):
|
|||
critical_service = False
|
||||
|
||||
def check_status(self):
|
||||
if (
|
||||
settings.DEFAULT_FILE_STORAGE
|
||||
== "django.core.files.storage.FileSystemStorage"
|
||||
):
|
||||
if settings.BASE_FILE_STORAGE == "django.core.files.storage.FileSystemStorage":
|
||||
raise ServiceWarning(
|
||||
"Any uploaded files will be lost on dyno restart because you have "
|
||||
"not configured an external file storage service. Please set "
|
||||
|
|
|
@ -63,7 +63,8 @@ class HealthCheckHandler:
|
|||
"""
|
||||
|
||||
s3_enabled = (
|
||||
settings.DEFAULT_FILE_STORAGE == "storages.backends.s3boto3.S3Boto3Storage"
|
||||
settings.STORAGES["default"]["BACKEND"]
|
||||
== "storages.backends.s3boto3.S3Boto3Storage"
|
||||
)
|
||||
if s3_enabled:
|
||||
return isinstance(plugin, DefaultFileStorageHealthCheck)
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
# Generated by Django 4.2.13 on 2024-06-24 14:23
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("core", "0087_userprofile_completed_onboarding"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="blacklistedtoken",
|
||||
name="user",
|
||||
),
|
||||
]
|
|
@ -237,9 +237,6 @@ class UserProfile(models.Model):
|
|||
|
||||
class BlacklistedToken(CreatedAndUpdatedOnMixin, models.Model):
|
||||
hashed_token = models.CharField(max_length=64, db_index=True, unique=True)
|
||||
user = models.ForeignKey(
|
||||
User, on_delete=models.CASCADE, null=True
|
||||
) # TODO delete this field in next release
|
||||
expires_at = models.DateTimeField()
|
||||
|
||||
|
||||
|
|
|
@ -683,16 +683,26 @@ class UserHandler(metaclass=baserow_trace_methods(tracer)):
|
|||
except IntegrityError:
|
||||
raise RefreshTokenAlreadyBlacklisted
|
||||
|
||||
def refresh_token_hash_is_blacklisted(self, hashed_token: str) -> bool:
|
||||
"""
|
||||
Checks if the provided refresh token hash is blacklisted.
|
||||
|
||||
:param hashed_token: The refresh token hash that must be checked.
|
||||
:return: Whether the token is blacklisted.
|
||||
"""
|
||||
|
||||
return BlacklistedToken.objects.filter(hashed_token=hashed_token).exists()
|
||||
|
||||
def refresh_token_is_blacklisted(self, refresh_token: str) -> bool:
|
||||
"""
|
||||
Checks if the provided refresh token is blacklisted.
|
||||
Hash the provided refresh token and check if it is blacklisted.
|
||||
|
||||
:param refresh_token: The refresh token that must be checked.
|
||||
:param refresh_token: The refresh token that must be hashed and checked.
|
||||
:return: Whether the token is blacklisted.
|
||||
"""
|
||||
|
||||
hashed_token = generate_hash(refresh_token)
|
||||
return BlacklistedToken.objects.filter(hashed_token=hashed_token).exists()
|
||||
return self.refresh_token_hash_is_blacklisted(hashed_token)
|
||||
|
||||
def _get_email_verification_signer(self) -> URLSafeSerializer:
|
||||
return URLSafeSerializer(settings.SECRET_KEY, "verify-email")
|
||||
|
|
|
@ -1,10 +1,20 @@
|
|||
import unicodedata
|
||||
from dataclasses import asdict, dataclass
|
||||
from typing import Dict, Optional, Union
|
||||
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.core.signing import TimestampSigner
|
||||
|
||||
from rest_framework_simplejwt.tokens import AccessToken, RefreshToken
|
||||
|
||||
from baserow.core.utils import generate_hash
|
||||
|
||||
|
||||
@dataclass
|
||||
class UserSessionPayload:
|
||||
user_id: int
|
||||
token_hash: str
|
||||
|
||||
|
||||
def normalize_email_address(email):
|
||||
"""
|
||||
|
@ -41,10 +51,34 @@ def generate_session_tokens_for_user(
|
|||
if refresh_token and verified_email_claim is not None:
|
||||
refresh_token["verified_email_claim"] = verified_email_claim
|
||||
|
||||
return prepare_user_tokens_payload(access_token, refresh_token)
|
||||
return prepare_user_tokens_payload(user.id, access_token, refresh_token)
|
||||
|
||||
|
||||
def sign_user_session(user_id: int, refresh_token: str) -> str:
|
||||
"""
|
||||
Signs the given user session using the Django signing backend.
|
||||
This session can be used to verify the user's identity in a cookie and will
|
||||
be valid for the same time of the refresh_token lifetime or until the user
|
||||
logs out (blacklisting the token).
|
||||
|
||||
NOTE: Don't use this payload to authenticate users in the API, especially
|
||||
for operations that can change the user's state, to avoid CSRF attacks.
|
||||
This payload is only meant to be used to verify the user's identity in a
|
||||
cookie for GET requests when the Authorization header is not available.
|
||||
|
||||
:param user_id: The user id that must be signed.
|
||||
:param refresh_token: The refresh token defining the session. An hash of this
|
||||
token will be stored in the session to keep it secure.
|
||||
:return: The signed user id.
|
||||
"""
|
||||
|
||||
return TimestampSigner().sign_object(
|
||||
asdict(UserSessionPayload(str(user_id), generate_hash(refresh_token)))
|
||||
)
|
||||
|
||||
|
||||
def prepare_user_tokens_payload(
|
||||
user_id: int,
|
||||
access_token: Union[AccessToken, str],
|
||||
refresh_token: Optional[Union[RefreshToken, str]] = None,
|
||||
) -> Dict[str, str]:
|
||||
|
@ -53,6 +87,7 @@ def prepare_user_tokens_payload(
|
|||
For backward compatibility the access token is also returned under the key
|
||||
`token` (deprecated).
|
||||
|
||||
:param user_id: The user id for which the tokens must be generated.
|
||||
:param access_token: The access token that must be returned.
|
||||
:param refresh_token: The refresh token that must be returned.
|
||||
:return: A dictionary with the access and refresh token.
|
||||
|
@ -65,5 +100,6 @@ def prepare_user_tokens_payload(
|
|||
|
||||
if refresh_token:
|
||||
session_tokens["refresh_token"] = str(refresh_token)
|
||||
session_tokens["user_session"] = sign_user_session(user_id, str(refresh_token))
|
||||
|
||||
return session_tokens
|
||||
|
|
|
@ -31,12 +31,12 @@ from .exceptions import (
|
|||
|
||||
class UserFileHandler:
|
||||
def get_user_file_by_name(
|
||||
self, user_file_name: int, base_queryset: Optional[QuerySet] = None
|
||||
self, user_file_name: str, base_queryset: Optional[QuerySet] = None
|
||||
) -> UserFile:
|
||||
"""
|
||||
Returns the user file with the provided id.
|
||||
|
||||
:param user_file_id: The id of the user file.
|
||||
:param user_file_name: The name of the user file.
|
||||
:param base_queryset: The base queryset that will be used to get the user file.
|
||||
:raises UserFile.DoesNotExist: If the user file does not exist.
|
||||
:return: The user file.
|
||||
|
@ -127,10 +127,10 @@ class UserFileHandler:
|
|||
overwritten.
|
||||
|
||||
:param image: The original Pillow image that serves as base when generating the
|
||||
the image.
|
||||
image.
|
||||
:type image: Image
|
||||
:param user_file: The user file for which the thumbnails must be generated
|
||||
and saved.
|
||||
:param user_file: The user file for which the thumbnails must be generated and
|
||||
saved.
|
||||
:type user_file: UserFile
|
||||
:param storage: The storage where the thumbnails must be saved to.
|
||||
:type storage: Storage or None
|
||||
|
@ -207,11 +207,11 @@ class UserFileHandler:
|
|||
)
|
||||
|
||||
storage = storage or default_storage
|
||||
hash = sha256_hash(stream)
|
||||
stream_hash = sha256_hash(stream)
|
||||
file_name = truncate_middle(file_name, 64)
|
||||
|
||||
existing_user_file = UserFile.objects.filter(
|
||||
original_name=file_name, sha256_hash=hash
|
||||
original_name=file_name, sha256_hash=stream_hash
|
||||
).first()
|
||||
|
||||
if existing_user_file:
|
||||
|
@ -219,7 +219,7 @@ class UserFileHandler:
|
|||
|
||||
extension = pathlib.Path(file_name).suffix[1:].lower()
|
||||
mime_type = mimetypes.guess_type(file_name)[0] or ""
|
||||
unique = self.generate_unique(hash, extension)
|
||||
unique = self.generate_unique(stream_hash, extension)
|
||||
|
||||
# By default the provided file is not an image.
|
||||
image = None
|
||||
|
@ -245,7 +245,7 @@ class UserFileHandler:
|
|||
mime_type=mime_type,
|
||||
unique=unique,
|
||||
uploaded_by=user,
|
||||
sha256_hash=hash,
|
||||
sha256_hash=stream_hash,
|
||||
is_image=is_image,
|
||||
image_width=image_width,
|
||||
image_height=image_height,
|
||||
|
|
|
@ -20,12 +20,12 @@ def test_debug_health_check_does_not_raise_when_debug_false():
|
|||
DebugModeHealthCheck().check_status()
|
||||
|
||||
|
||||
@override_settings(DEFAULT_FILE_STORAGE="django.core.files.storage.FileSystemStorage")
|
||||
@override_settings(BASE_FILE_STORAGE="django.core.files.storage.FileSystemStorage")
|
||||
def test_heroku_health_check_raises_when_default_storage_set():
|
||||
with pytest.raises(ServiceWarning):
|
||||
HerokuExternalFileStorageConfiguredHealthCheck().check_status()
|
||||
|
||||
|
||||
@override_settings(DEFAULT_FILE_STORAGE="storages.backends.s3boto3.S3Boto3Storage")
|
||||
@override_settings(BASE_FILE_STORAGE="storages.backends.s3boto3.S3Boto3Storage")
|
||||
def test_heroku_health_check_doesnt_raise_when_boto_set():
|
||||
HerokuExternalFileStorageConfiguredHealthCheck().check_status()
|
||||
|
|
|
@ -86,8 +86,8 @@ def test_send_reset_user_password_action_type(data_fixture, mailoutbox):
|
|||
def test_reset_user_password_action_type(data_fixture):
|
||||
user = data_fixture.create_user(password="12345678")
|
||||
signer = UserHandler().get_reset_password_signer()
|
||||
signed_user_id = signer.dumps(user.id)
|
||||
user_session = signer.dumps(user.id)
|
||||
user = action_type_registry.get(ResetUserPasswordActionType.type).do(
|
||||
signed_user_id, "12345678"
|
||||
user_session, "12345678"
|
||||
)
|
||||
user.check_password("12345678") is True
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"type": "feature",
|
||||
"message": "Serve files from the backend to authorized users and/or with temporary links.",
|
||||
"issue_number": 2474,
|
||||
"bullet_points": [],
|
||||
"created_at": "2024-06-22"
|
||||
}
|
|
@ -172,6 +172,10 @@ x-backend-variables: &backend-variables
|
|||
BASEROW_OPENAI_MODELS:
|
||||
BASEROW_OLLAMA_HOST:
|
||||
BASEROW_OLLAMA_MODELS:
|
||||
BASEROW_SERVE_FILES_THROUGH_BACKEND:
|
||||
BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION:
|
||||
BASEROW_SERVE_FILES_THROUGH_BACKEND_EXPIRE_SECONDS:
|
||||
|
||||
|
||||
|
||||
services:
|
||||
|
|
|
@ -228,7 +228,7 @@ domain than your Baserow, you need to make sure CORS is configured correctly.
|
|||
#### User File Variables Table
|
||||
|
||||
| Name | Description | Defaults |
|
||||
|--------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------|
|
||||
|----------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------|
|
||||
| MEDIA\_URL | **INTERNAL** The URL at which user uploaded media files will be made available | $PUBLIC\_BACKEND\_URL/media/ |
|
||||
| MEDIA\_ROOT | **INTERNAL** The folder in which the backend will store user uploaded files | /baserow/media or $DATA_DIR/media for the `baserow/baserow` all-in-one image |
|
||||
| **<br>** | | |
|
||||
|
@ -246,6 +246,10 @@ domain than your Baserow, you need to make sure CORS is configured correctly.
|
|||
| **<br>** | | |
|
||||
| AZURE\_* | All AZURE\_ prefixed settings mentioned [here](https://django-storages.readthedocs.io/en/1.13.2/backends/azure.html#settings) are also supported. | |
|
||||
| AZURE\_ACCOUNT\_KEY\_FILE\_PATH | **Optional** The path to a file containing your Azure account key. | |
|
||||
| BASEROW\_SERVE\_FILES\_THROUGH\_BACKEND | Set this value to `true` to have the backend serve files. This feature is disabled by default. This setting does not automatically secure your storage server; additional measures should be taken if it was public to ensure it becomes inaccessible to unauthorized users. Note that it only works if the instance is on the enterprise plan. | | |
|
||||
| BASEROW\_SERVE\_FILES\_THROUGH\_BACKEND\_PERMISSION | If this variable is not set or is left empty, the default behavior is equivalent to setting it to `DISABLED`, meaning no checks will be performed on users attempting to download files. To restrict file downloads to authenticated users, set this variable to `SIGNED_IN`. For an even stricter control, where only authenticated users with access to the workspace containing the file can download it, set the variable to `WORKSPACE_ACCESS`. | |
|
||||
| BASEROW\_SERVE\_FILES\_THROUGH\_BACKEND\_EXPIRE\_SECONDS | When this variable is unset, file links are permanent and always accessible, provided the necessary permissions are met. If assigned a positive integer, this value specifies the link's validity period in seconds. After this duration expires, the link becomes invalid, preventing further file downloads. | |
|
||||
|
||||
|
||||
### Email Configuration
|
||||
| Name | Description | Defaults |
|
||||
|
|
71
docs/installation/secure-file-serve.md
Normal file
71
docs/installation/secure-file-serve.md
Normal file
|
@ -0,0 +1,71 @@
|
|||
# Secure File Serving
|
||||
|
||||
This document outlines the steps to enable secure file serving in Baserow, a feature
|
||||
that allows the backend to serve files directly without needing another web server. This
|
||||
capability introduces several benefits, including the ability to set expiration times
|
||||
for file links and enforce access controls based on user authentication and workspace
|
||||
membership. However, it's important to weigh these benefits against potential
|
||||
performance costs and other considerations.
|
||||
|
||||
Note that this is an enterprise feature.
|
||||
|
||||
## Configuration
|
||||
|
||||
To enable secure file serving, you must configure the following environment variables
|
||||
in your Baserow instance:
|
||||
|
||||
1. **BASEROW_SERVE_FILES_THROUGH_BACKEND**: Set this variable to `true` to activate
|
||||
backend file serving. This feature is disabled by default. Note that enabling this
|
||||
setting does not automatically secure your storage server against unauthorized
|
||||
access. You should take additional security measures if your storage server serves
|
||||
files publicly.
|
||||
|
||||
2. **BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION**: This variable controls access
|
||||
permissions for downloading files. The default setting, `DISABLED`, allows anyone to
|
||||
download files. To restrict downloads to signed-in users, set this to `SIGNED_IN`.
|
||||
For tighter control, where only users with workspace access can download files, use
|
||||
`WORKSPACE_ACCESS`.
|
||||
|
||||
3. **BASEROW_SERVE_FILES_THROUGH_BACKEND_EXPIRE_SECONDS**: Use this variable to set an
|
||||
expiration time for file links, specified in seconds. Unset, or set this to a
|
||||
non-positive integer, makes file links permanent. Setting a positive integer will
|
||||
make the link expire after the specified duration, enhancing security by preventing
|
||||
outdated link access.
|
||||
|
||||
## Benefits
|
||||
|
||||
- **Enhanced Security**: Direct backend serving of files allows for more granular
|
||||
control over who can access files and when.
|
||||
- **Expiration Times**: Ability to set expiration times for file links, reducing the
|
||||
risk of unauthorized access to files over time.
|
||||
- **Access Control**: Ensures that files can only be downloaded by users who are either
|
||||
logged in or have specific workspace access, based on your configuration.
|
||||
|
||||
## Considerations
|
||||
|
||||
- **Performance Cost**: Serving files through the backend can introduce a performance
|
||||
overhead. It may necessitate deploying additional backend (asgi or wsgi) workers to
|
||||
maintain fast response times.
|
||||
- **Enterprise License Required**: This feature requires a valid enterprise license to
|
||||
activate.
|
||||
- **Domain Restrictions for Cookie-Based Authentication**: If using cookie-based user
|
||||
checks, the Baserow instance must be on the same domain or subdomains as the frontend.
|
||||
Cross-domain setups will not support this authentication method.
|
||||
- **User Re-authentication**: Users must sign in again after this feature is enabled to
|
||||
ensure proper access control.
|
||||
- **Publicly Shared Files**: Depending on the configured permission level, files that
|
||||
are publicly shared through applications, views, or APIs may become inaccessible.
|
||||
|
||||
## Steps to Enable
|
||||
|
||||
1. Ensure you have a valid enterprise license for Baserow.
|
||||
2. Configure the environment variables as described in the Configuration section above.
|
||||
3. If your files were previously served directly from a storage service like S3, adjust
|
||||
your storage service settings to ensure files are no longer publicly accessible.
|
||||
Baserow will now handle file serving.
|
||||
4. Consider the performance implications and plan for additional backend workers if
|
||||
necessary.
|
||||
5. Inform users that they may need to sign in again to access files after these changes.
|
||||
|
||||
By following these steps and considerations, you can securely serve files through
|
||||
Baserow, enhancing the security and control over file access within your organization.
|
|
@ -0,0 +1,7 @@
|
|||
from rest_framework.status import HTTP_403_FORBIDDEN
|
||||
|
||||
ERROR_SECURE_FILE_SERVE_EXCEPTION = (
|
||||
"ERROR_SECURE_FILE_SERVE_EXCEPTION",
|
||||
HTTP_403_FORBIDDEN,
|
||||
"The requested signed data is invalid.",
|
||||
)
|
9
enterprise/backend/src/baserow_enterprise/api/secure_file_serve/urls.py
Executable file
9
enterprise/backend/src/baserow_enterprise/api/secure_file_serve/urls.py
Executable file
|
@ -0,0 +1,9 @@
|
|||
from django.urls import re_path
|
||||
|
||||
from .views import DownloadView
|
||||
|
||||
app_name = "baserow_enterprise.api.files"
|
||||
|
||||
urlpatterns = [
|
||||
re_path(r"(?P<signed_data>.*)", DownloadView.as_view(), name="download"),
|
||||
]
|
146
enterprise/backend/src/baserow_enterprise/api/secure_file_serve/views.py
Executable file
146
enterprise/backend/src/baserow_enterprise/api/secure_file_serve/views.py
Executable file
|
@ -0,0 +1,146 @@
|
|||
from typing import Optional
|
||||
from urllib.parse import unquote
|
||||
from urllib.request import Request
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.signing import BadSignature, SignatureExpired, TimestampSigner
|
||||
from django.http import FileResponse
|
||||
from django.utils.encoding import smart_str
|
||||
|
||||
from baserow_premium.license.exceptions import FeaturesNotAvailableError
|
||||
from baserow_premium.license.handler import LicenseHandler
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import authentication, exceptions
|
||||
from rest_framework.renderers import BaseRenderer
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from baserow.api.decorators import map_exceptions
|
||||
from baserow.api.schemas import get_error_schema
|
||||
from baserow.core.user.handler import UserHandler
|
||||
from baserow.core.user.utils import UserSessionPayload
|
||||
from baserow_enterprise.features import SECURE_FILE_SERVE
|
||||
from baserow_enterprise.secure_file_serve.constants import SecureFileServePermission
|
||||
from baserow_enterprise.secure_file_serve.exceptions import SecureFileServeException
|
||||
from baserow_enterprise.secure_file_serve.handler import SecureFileServeHandler
|
||||
|
||||
from .errors import ERROR_SECURE_FILE_SERVE_EXCEPTION
|
||||
|
||||
|
||||
class BinaryRenderer(BaseRenderer):
|
||||
media_type = "application/octet-stream"
|
||||
format = "bin"
|
||||
charset = None
|
||||
render_style = "binary"
|
||||
|
||||
def render(self, data, accepted_media_type=None, renderer_context=None):
|
||||
return data
|
||||
|
||||
|
||||
def extract_user_session_from_request(
|
||||
request: Request, max_age: int = settings.REFRESH_TOKEN_LIFETIME.total_seconds()
|
||||
) -> Optional[UserSessionPayload]:
|
||||
"""
|
||||
Extracts the user id from the user_session cookie value. The cookie is signed with a
|
||||
TimestampSigner and can be used to verify the user's identity. Look at the
|
||||
generate_session_tokens_for_user for more information on how the cookie is signed.
|
||||
Ensure your client is sending that value as cookie with the user_session key.
|
||||
|
||||
:param request: The request object.
|
||||
:param max_age: The max age of the signed data.
|
||||
:return: The user session payload if the cookie is valid, otherwise None.
|
||||
"""
|
||||
|
||||
cookie = request.COOKIES.get("user_session", None) or ""
|
||||
signer = TimestampSigner()
|
||||
|
||||
try:
|
||||
return UserSessionPayload(
|
||||
**signer.unsign_object(unquote(cookie), max_age=max_age)
|
||||
)
|
||||
except (BadSignature, SignatureExpired):
|
||||
return None
|
||||
|
||||
|
||||
class SecureFileServeAuthentication(authentication.BaseAuthentication):
|
||||
def authenticate(self, request):
|
||||
"""
|
||||
Extract the user session payload from the cookie and verify
|
||||
that an active user exists and the token created in this
|
||||
session is not blacklisted.
|
||||
"""
|
||||
|
||||
err_msg = "Missing or invalid user session."
|
||||
|
||||
user_session = extract_user_session_from_request(request)
|
||||
if user_session is None:
|
||||
raise exceptions.AuthenticationFailed(err_msg)
|
||||
|
||||
user_model = get_user_model()
|
||||
try:
|
||||
user = user_model.objects.select_related("profile").get(
|
||||
id=user_session.user_id
|
||||
)
|
||||
except user_model.DoesNotExist:
|
||||
raise exceptions.AuthenticationFailed(err_msg)
|
||||
|
||||
if not user.is_active or UserHandler().refresh_token_hash_is_blacklisted(
|
||||
user_session.token_hash
|
||||
):
|
||||
raise exceptions.AuthenticationFailed(err_msg)
|
||||
|
||||
return (user, request)
|
||||
|
||||
|
||||
class DownloadView(APIView):
|
||||
permission_classes = []
|
||||
|
||||
@property
|
||||
def authentication_classes(self):
|
||||
if (
|
||||
settings.BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION
|
||||
!= SecureFileServePermission.DISABLED
|
||||
):
|
||||
return [SecureFileServeAuthentication]
|
||||
else:
|
||||
return []
|
||||
|
||||
renderer_classes = [BinaryRenderer]
|
||||
|
||||
@extend_schema(
|
||||
tags=["Secure file serve"],
|
||||
operation_id="secure_file_serve_download",
|
||||
description=(
|
||||
"Downloads a file using the backend and the secure file serve feature. "
|
||||
"The signed data is extracted from the URL and used to verify if the "
|
||||
"user has access to the file. If the permissions check passes and the "
|
||||
"file exists, the file is served to the user."
|
||||
"\n\nThis is a **enterprise** feature."
|
||||
),
|
||||
responses={
|
||||
200: {"description": "File download"},
|
||||
403: get_error_schema(["ERROR_SECURE_FILE_SERVE_EXCEPTION"]),
|
||||
},
|
||||
auth=[],
|
||||
)
|
||||
@map_exceptions(
|
||||
{
|
||||
SecureFileServeException: ERROR_SECURE_FILE_SERVE_EXCEPTION,
|
||||
}
|
||||
)
|
||||
def get(self, request: Request, signed_data: str) -> FileResponse:
|
||||
if not LicenseHandler.instance_has_feature(SECURE_FILE_SERVE):
|
||||
raise FeaturesNotAvailableError()
|
||||
|
||||
secure_file = SecureFileServeHandler().extract_file_info_or_raise(
|
||||
request.user, signed_data
|
||||
)
|
||||
|
||||
download_file_name = request.GET.get("dl", "")
|
||||
as_attachment = bool(download_file_name)
|
||||
|
||||
return FileResponse(
|
||||
secure_file.open(),
|
||||
as_attachment=as_attachment,
|
||||
filename=smart_str(download_file_name or secure_file.name),
|
||||
)
|
|
@ -3,6 +3,7 @@ from django.urls import include, path
|
|||
from .admin import urls as admin_urls
|
||||
from .audit_log import urls as audit_log_urls
|
||||
from .role import urls as role_urls
|
||||
from .secure_file_serve import urls as secure_file_serve_urls
|
||||
from .sso import urls as sso_urls
|
||||
from .teams import urls as teams_urls
|
||||
|
||||
|
@ -14,4 +15,5 @@ urlpatterns = [
|
|||
path("admin/", include(admin_urls, namespace="admin")),
|
||||
path("sso/", include(sso_urls, namespace="sso")),
|
||||
path("audit-log/", include(audit_log_urls, namespace="audit_log")),
|
||||
path("files/", include(secure_file_serve_urls, namespace="files")),
|
||||
]
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
import os
|
||||
|
||||
from baserow.config.settings.utils import enum_member_by_value
|
||||
from baserow_enterprise.secure_file_serve.constants import SecureFileServePermission
|
||||
|
||||
|
||||
def setup(settings):
|
||||
"""
|
||||
|
@ -13,11 +16,12 @@ def setup(settings):
|
|||
"""
|
||||
|
||||
settings.BASEROW_ENTERPRISE_AUDIT_LOG_CLEANUP_INTERVAL_MINUTES = int(
|
||||
os.getenv("BASEROW_ENTERPRISE_AUDIT_LOG_CLEANUP_INTERVAL_MINUTES", 24 * 60)
|
||||
os.getenv("BASEROW_ENTERPRISE_AUDIT_LOG_CLEANUP_INTERVAL_MINUTES", "")
|
||||
or 24 * 60
|
||||
)
|
||||
|
||||
settings.BASEROW_ENTERPRISE_AUDIT_LOG_RETENTION_DAYS = int(
|
||||
os.getenv("BASEROW_ENTERPRISE_AUDIT_LOG_RETENTION_DAYS", 365)
|
||||
os.getenv("BASEROW_ENTERPRISE_AUDIT_LOG_RETENTION_DAYS", "") or 365
|
||||
)
|
||||
|
||||
# Set this to True to enable users to login with auth providers different than
|
||||
|
@ -25,3 +29,29 @@ def setup(settings):
|
|||
settings.BASEROW_ALLOW_MULTIPLE_SSO_PROVIDERS_FOR_SAME_ACCOUNT = bool(
|
||||
os.getenv("BASEROW_ALLOW_MULTIPLE_SSO_PROVIDERS_FOR_SAME_ACCOUNT", False)
|
||||
)
|
||||
|
||||
serve_files_through_backend_permission = (
|
||||
os.getenv("BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION", "")
|
||||
or SecureFileServePermission.DISABLED.value
|
||||
)
|
||||
|
||||
settings.BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION = enum_member_by_value(
|
||||
SecureFileServePermission, serve_files_through_backend_permission
|
||||
)
|
||||
|
||||
# If the expire seconds is not set to a number greater than zero, the signature will
|
||||
# never expire.
|
||||
settings.BASEROW_SERVE_FILES_THROUGH_BACKEND_EXPIRE_SECONDS = (
|
||||
int(os.getenv("BASEROW_SERVE_FILES_THROUGH_BACKEND_EXPIRE_SECONDS", "") or 0)
|
||||
or None
|
||||
)
|
||||
|
||||
serve_files_through_backend = bool(
|
||||
os.getenv("BASEROW_SERVE_FILES_THROUGH_BACKEND", False)
|
||||
)
|
||||
if serve_files_through_backend:
|
||||
settings.STORAGES["default"][
|
||||
"BACKEND"
|
||||
] = "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
|
||||
|
||||
settings.BASEROW_SERVE_FILES_THROUGH_BACKEND = serve_files_through_backend
|
||||
|
|
|
@ -4,4 +4,5 @@ SSO = "sso"
|
|||
SUPPORT = "support"
|
||||
AUDIT_LOG = "audit_log"
|
||||
METRICS = "metrics"
|
||||
SECURE_FILE_SERVE = "secure_file_serve"
|
||||
ENTERPRISE_SETTINGS = "ENTERPRISE_SETTINGS"
|
||||
|
|
|
@ -9,6 +9,7 @@ from baserow_enterprise.features import (
|
|||
AUDIT_LOG,
|
||||
ENTERPRISE_SETTINGS,
|
||||
RBAC,
|
||||
SECURE_FILE_SERVE,
|
||||
SSO,
|
||||
SUPPORT,
|
||||
TEAMS,
|
||||
|
@ -21,7 +22,15 @@ from baserow_enterprise.role.seat_usage_calculator import (
|
|||
class EnterpriseWithoutSupportLicenseType(LicenseType):
|
||||
type = "enterprise_without_support"
|
||||
order = 100
|
||||
features = [PREMIUM, RBAC, SSO, TEAMS, AUDIT_LOG, ENTERPRISE_SETTINGS]
|
||||
features = [
|
||||
PREMIUM,
|
||||
RBAC,
|
||||
SSO,
|
||||
TEAMS,
|
||||
AUDIT_LOG,
|
||||
SECURE_FILE_SERVE,
|
||||
ENTERPRISE_SETTINGS,
|
||||
]
|
||||
instance_wide = True
|
||||
seats_manually_assigned = False
|
||||
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
from enum import Enum
|
||||
|
||||
|
||||
class SecureFileServePermission(Enum):
|
||||
DISABLED = "DISABLED"
|
||||
SIGNED_IN = "SIGNED_IN"
|
||||
WORKSPACE_ACCESS = "WORKSPACE_ACCESS"
|
||||
|
||||
|
||||
SECURE_FILE_SERVE_SIGNER_SALT = "secure_file_serve"
|
|
@ -0,0 +1,3 @@
|
|||
class SecureFileServeException(Exception):
    """Raised when a secure-file-serve request must be rejected."""

    def __init__(self, message):
        # Forward the message to Exception so ``str(e)`` and ``e.args`` are
        # populated (the previous implementation left them empty).
        super().__init__(message)
        self.message = message
|
|
@ -0,0 +1,100 @@
|
|||
from dataclasses import dataclass
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.core.files.storage import default_storage
|
||||
from django.core.signing import BadSignature, SignatureExpired
|
||||
|
||||
from baserow.core.handler import CoreHandler
|
||||
from baserow.core.operations import ReadWorkspaceOperationType
|
||||
from baserow_enterprise.secure_file_serve.constants import SecureFileServePermission
|
||||
from baserow_enterprise.secure_file_serve.exceptions import SecureFileServeException
|
||||
from baserow_enterprise.secure_file_serve.storage import (
|
||||
EnterpriseFileStorage,
|
||||
SecureFileServeSignerPayload,
|
||||
)
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
@dataclass
class SecureFile:
    """A file resolved from a signed URL: display name plus storage path."""

    name: str
    path: str

    def open(self, mode="rb"):
        """Open the underlying file from the default storage backend."""
        storage = default_storage
        return storage.open(self.path, mode)
|
||||
|
||||
|
||||
class SecureFileServeHandler:
    """
    Resolves signed secure-file-serve URLs: verifies the signature, checks the
    requesting user's permissions and maps the payload onto a file in the
    default storage.
    """

    def unsign_data(self, signed_path: str) -> SecureFileServeSignerPayload:
        """
        Unsign the signed data and returns the payload. If the signature is invalid or
        expired, a SecureFileServeException is raised.

        :param signed_path: The signed data to unsign.
        :return: The payload extracted from the signed data.
        :raises SecureFileServeException: If the signature is invalid or expired.
        """

        try:
            unsigned_data = EnterpriseFileStorage.unsign_data(signed_path)
        except SignatureExpired:
            raise SecureFileServeException("File expired")
        except BadSignature:
            raise SecureFileServeException("Invalid signature")
        return unsigned_data

    def raise_if_user_does_not_have_permissions(
        self, user: AbstractUser, data: SecureFileServeSignerPayload
    ):
        """
        Raises if the configured permission level forbids ``user`` from
        downloading the file described by ``data``. Only WORKSPACE_ACCESS is
        enforced here; SIGNED_IN/DISABLED checks are expected to happen before
        this handler is reached.

        :param user: The user requesting the file.
        :param data: The payload extracted from the signed URL.
        :raises SecureFileServeException: If access must be denied.
        """

        # TODO: complete this method
        if (
            settings.BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION
            == SecureFileServePermission.WORKSPACE_ACCESS
        ):
            # ``data`` is a SecureFileServeSignerPayload dataclass, not a dict,
            # so the attribute must be read directly. The previous
            # ``data.get("workspace_id", None)`` raised AttributeError.
            workspace_id = data.workspace_id
            if not workspace_id:
                raise SecureFileServeException("Workspace id is required")

            workspace = CoreHandler().get_workspace(workspace_id)
            # NOTE(review): assumes check_permissions returns a falsy value on
            # failure rather than raising — confirm against CoreHandler.
            has_permission = CoreHandler().check_permissions(
                user,
                ReadWorkspaceOperationType.type,
                workspace=workspace,
                context=workspace,
            )
            if not has_permission:
                raise SecureFileServeException("Can't access file")

    def get_file_path(self, data: SecureFileServeSignerPayload) -> str:
        """
        Returns the storage path stored in the payload, verifying existence.

        :param data: The payload extracted from the signed URL.
        :raises SecureFileServeException: If the file does not exist.
        """

        file_path = data.name

        if not default_storage.exists(file_path):
            raise SecureFileServeException("File does not exist")
        return file_path

    def get_file_name(self, file_path: str) -> str:
        """Returns the last path segment of ``file_path`` (the download name)."""

        if not file_path:
            return ""
        return file_path.split("/")[-1]

    def extract_file_info_or_raise(
        self, user: AbstractUser, signed_data: str
    ) -> SecureFile:
        """
        Extracts the file name and the file path from the signed data or raises an
        exception if the user does not have access to the file or the signature is
        expired or invalid.

        :param user: The user that must be in the workspace.
        :param signed_data: The signed data extracted from the URL.
        :return: The file info object containing the file name and the file path.
        :raises SecureFileServeException: If the user does not have access to the file
            or the signature is expired or invalid.
        """

        unsigned_data = self.unsign_data(signed_data)
        self.raise_if_user_does_not_have_permissions(user, unsigned_data)
        file_path = self.get_file_path(unsigned_data)
        file_name = self.get_file_name(file_path)
        return SecureFile(file_name, file_path)
|
|
@ -0,0 +1,110 @@
|
|||
import threading
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import asdict, dataclass
|
||||
from typing import Optional
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.signing import BadSignature, TimestampSigner
|
||||
from django.urls import reverse
|
||||
from django.utils.module_loading import import_string
|
||||
|
||||
from .constants import SECURE_FILE_SERVE_SIGNER_SALT
|
||||
|
||||
_thread_locals = threading.local()
|
||||
|
||||
|
||||
@contextmanager
|
||||
def workspace_id_context(workspace_id):
|
||||
_thread_locals.workspace_id = workspace_id
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
_thread_locals.workspace_id = None
|
||||
|
||||
|
||||
class EnterpriseFileStorageMeta(type):
    """
    Metaclass that swaps the declared base class for the storage backend
    configured in ``settings.BASE_FILE_STORAGE`` at class-creation time, so the
    signing behaviour wraps whichever backend (filesystem, S3, GCS, Azure) is
    active.
    """

    def __new__(cls, name, bases, dct):
        # The declared ``bases`` are intentionally ignored; the real base is
        # resolved from settings when the class object is created.
        base_class = import_string(settings.BASE_FILE_STORAGE)
        return super().__new__(cls, name, (base_class,), dct)
|
||||
|
||||
|
||||
def _get_signer():
    """
    Builds the ``TimestampSigner`` used to sign and unsign secure-file-serve
    payloads, bound to the dedicated salt for this feature.
    """

    salt = SECURE_FILE_SERVE_SIGNER_SALT
    return TimestampSigner(salt=salt)
|
||||
|
||||
|
||||
@dataclass
class SecureFileServeSignerPayload:
    """Payload embedded in every signed secure-file URL."""

    # Storage path of the file being served.
    name: str
    # Workspace used for the WORKSPACE_ACCESS permission check, when known.
    workspace_id: Optional[int] = None
|
||||
|
||||
|
||||
class EnterpriseFileStorage(metaclass=EnterpriseFileStorageMeta):
    """
    Overrides the default file storage class to provide a way to sign and unsign file
    names. This is used to securely serve files through the backend. The file name is
    signed and then returned as a URL. The URL can be used to download the file. The
    signature is verified before serving the file to ensure that the user has access to
    the file.
    """

    @classmethod
    def sign_data(cls, name: str) -> str:
        """
        Signs the data and returns the signed data.

        :param name: The name of the file to sign.
        :return: The signed data.
        """

        signer = _get_signer()

        # The workspace id is supplied out-of-band via ``workspace_id_context``
        # so call sites that cannot pass it explicitly still get it embedded.
        workspace_id = getattr(_thread_locals, "workspace_id", None)
        return signer.sign_object(
            asdict(SecureFileServeSignerPayload(name, workspace_id))
        )

    @classmethod
    def unsign_data(cls, signed_data: str) -> SecureFileServeSignerPayload:
        """
        Unsign the signed data and returns the payload. If the signature is invalid or
        expired, a BadSignature or SignatureExpired exception is raised.

        :param signed_data: The signed data to unsign.
        :return: The payload extracted from the signed data.
        :raises BadSignature: If the signature is invalid.
        :raises SignatureExpired: If the signature is expired.
        """

        signer = _get_signer()
        try:
            # A ``max_age`` of ``None`` means the signature never expires.
            return SecureFileServeSignerPayload(
                **signer.unsign_object(
                    signed_data,
                    max_age=settings.BASEROW_SERVE_FILES_THROUGH_BACKEND_EXPIRE_SECONDS,
                )
            )
        except TypeError:
            # The signature verified but the payload dict does not match the
            # dataclass fields — treat it as tampered data.
            raise BadSignature("Malformed payload")

    def get_signed_file_path(self, name: str) -> str:
        """
        Signs the file name and returns the signed file path to the file to serve via
        the backend.

        :param name: The name of the file to sign.
        :return: The signed file path to the file to serve via the backend.
        """

        return reverse(
            "api:enterprise:files:download",
            kwargs={"signed_data": self.sign_data(name)},
        )

    def url(self, name: str) -> str:
        # Replaces the backend's direct URL with an absolute backend download
        # URL that carries the signed payload.
        signed_path = self.get_signed_file_path(name)
        return urljoin(settings.PUBLIC_BACKEND_URL, signed_path)
|
|
@ -0,0 +1,755 @@
|
|||
from io import BytesIO
|
||||
from unittest.mock import patch
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.files.storage import default_storage
|
||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||
from django.shortcuts import reverse
|
||||
from django.test.utils import override_settings
|
||||
|
||||
import pytest
|
||||
from freezegun import freeze_time
|
||||
from rest_framework.status import (
|
||||
HTTP_200_OK,
|
||||
HTTP_202_ACCEPTED,
|
||||
HTTP_402_PAYMENT_REQUIRED,
|
||||
HTTP_403_FORBIDDEN,
|
||||
)
|
||||
from rest_framework_simplejwt.tokens import RefreshToken
|
||||
from rest_framework_simplejwt.utils import datetime_from_epoch
|
||||
|
||||
from baserow.core.user.handler import UserHandler
|
||||
|
||||
|
||||
def dummy_storage(tmpdir):
    """
    Returns a fake of the configured default storage class for tests: every
    file "exists", opens as ``b"Hello World"``, and uploads are always stored
    under the fixed name ``test.txt``.
    """

    class FakeFileSystemStorage(default_storage.__class__):
        def exists(self, name: str) -> bool:
            # Pretend every requested file is present.
            return True

        def get_available_name(self, name: str, max_length: int | None = ...) -> str:
            return "test.txt"

        def open(self, name, mode="rb"):
            return BytesIO(b"Hello World")

        def delete(self, name):
            pass

    return FakeFileSystemStorage(location=str(tmpdir), base_url="http://localhost")
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(DEBUG=True)
def test_files_are_served_by_base_file_storage_by_default(
    enterprise_data_fixture, api_client, tmpdir
):
    """Without the secure-serve storage configured, uploads get MEDIA_URL links."""

    _, token = enterprise_data_fixture.create_user_and_token()

    with patch(
        "baserow.core.user_files.handler.default_storage", new=dummy_storage(tmpdir)
    ):
        file = SimpleUploadedFile("test.txt", b"Hello World")
        response = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )

    assert response.status_code == HTTP_200_OK, response.json()
    assert response.json()["url"].startswith(settings.MEDIA_URL)
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_files_can_be_served_by_the_backend(
    enterprise_data_fixture, api_client, tmpdir
):
    """With the enterprise storage configured, upload URLs point at the backend."""

    _, token = enterprise_data_fixture.create_user_and_token()

    with patch(
        "baserow.core.user_files.handler.default_storage", new=dummy_storage(tmpdir)
    ):
        file = SimpleUploadedFile("test.txt", b"Hello World")
        response = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )

    assert response.status_code == HTTP_200_OK, response.json()
    assert response.json()["url"].startswith("http://localhost:8000/api/files/")
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_secure_file_serve_requires_license_to_download_files(
    enterprise_data_fixture, api_client, tmpdir
):
    """Downloading through the backend without an enterprise license yields 402."""

    _, token = enterprise_data_fixture.create_user_and_token()

    with patch(
        "baserow.core.user_files.handler.default_storage", new=dummy_storage(tmpdir)
    ):
        file = SimpleUploadedFile("test.txt", b"Hello World")
        response = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )

    assert response.status_code == HTTP_200_OK, response.json()
    backend_file_url = response.json()["url"]

    response = api_client.get(
        backend_file_url.replace("http://localhost:8000", ""),
    )
    assert response.status_code == HTTP_402_PAYMENT_REQUIRED
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_files_can_be_downloaded_by_the_backend_with_valid_license(
    enable_enterprise, enterprise_data_fixture, api_client, tmpdir
):
    """With an active enterprise license the backend serves the file (200)."""

    _, token = enterprise_data_fixture.create_user_and_token()

    storage = dummy_storage(tmpdir)
    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        file = SimpleUploadedFile("test.txt", b"Hello World")
        response = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )

    assert response.status_code == HTTP_200_OK, response.json()
    backend_file_url = response.json()["url"]

    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
        )
    assert response.status_code == HTTP_200_OK
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_files_urls_must_be_valid(
    enable_enterprise, enterprise_data_fixture, api_client, tmpdir
):
    """Empty, bogus or truncated signed data is rejected with 403."""

    _, token = enterprise_data_fixture.create_user_and_token()

    storage = dummy_storage(tmpdir)
    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        file = SimpleUploadedFile("test.txt", b"Hello World")
        response = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )

    assert response.status_code == HTTP_200_OK, response.json()
    backend_file_url = response.json()["url"]

    # Even with a dummy storage returning always the same file, if the signed data is
    # invalid the file cannot be downloaded
    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        response = api_client.get(
            reverse("api:enterprise:files:download", kwargs={"signed_data": ""}),
        )
    assert response.status_code == HTTP_403_FORBIDDEN

    # Even with a dummy storage returning always the same file, if the signed data is
    # invalid the file cannot be downloaded
    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        response = api_client.get(
            reverse("api:enterprise:files:download", kwargs={"signed_data": "invalid"}),
        )
    assert response.status_code == HTTP_403_FORBIDDEN

    # Remove a couple of characters from the signed data
    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", "")[:-2],
        )
    assert response.status_code == HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND_EXPIRE_SECONDS=59,
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_files_urls_can_expire(
    enable_enterprise, enterprise_data_fixture, api_client, tmpdir
):
    """A signed URL works within the 59s expiry window and 403s afterwards."""

    user = enterprise_data_fixture.create_user()

    storage = dummy_storage(tmpdir)
    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        with freeze_time("2024-01-01 12:00:00"):
            file = SimpleUploadedFile("test.txt", b"Hello World")
            token = enterprise_data_fixture.generate_token(user)
            response = api_client.post(
                reverse("api:user_files:upload_file"),
                data={"file": file},
                format="multipart",
                HTTP_AUTHORIZATION=f"JWT {token}",
            )

    assert response.status_code == HTTP_200_OK, response.json()
    backend_file_url = response.json()["url"]

    # before expiration the url can be accessed
    with (
        patch(
            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
        ),
        freeze_time("2024-01-01 12:00:59"),
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
        )

        assert response.status_code == HTTP_200_OK

    # After expiration the url cannot be accessed anymore
    with (
        patch(
            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
        ),
        freeze_time("2024-01-01 12:01:00"),
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
        )

        assert response.status_code == HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION="SIGNED_IN",
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_only_authenticated_users_can_download_files(
    enable_enterprise, enterprise_data_fixture, api_client, tmpdir
):
    """With SIGNED_IN permission, only requests carrying the session cookie succeed."""

    user = enterprise_data_fixture.create_user(password="password")

    # Login to generate the signed cookie we need to download files
    response = api_client.post(
        reverse("api:user:token_auth"),
        data={"email": user.email, "password": "password"},
        format="json",
    )
    assert response.status_code == HTTP_200_OK
    cookie = response.json()["user_session"]

    storage = dummy_storage(tmpdir)
    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        file = SimpleUploadedFile("test.txt", b"Hello World")
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )

    assert response.status_code == HTTP_200_OK, response.json()
    backend_file_url = response.json()["url"]

    # without cookie the url cannot be accessed
    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
        )

        assert response.status_code == HTTP_403_FORBIDDEN

    # with cookie the url can be accessed
    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
            HTTP_COOKIE=f"user_session={cookie}",
        )

        assert response.status_code == HTTP_200_OK
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION="SIGNED_IN",
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_sign_out_prevents_file_download(
    enable_enterprise, enterprise_data_fixture, api_client, tmpdir
):
    """Blacklisting the refresh token invalidates the session cookie for downloads."""

    user = enterprise_data_fixture.create_user(password="password")

    # Login to generate the signed cookie we need to download files
    response = api_client.post(
        reverse("api:user:token_auth"),
        data={"email": user.email, "password": "password"},
        format="json",
    )
    assert response.status_code == HTTP_200_OK
    cookie = response.json()["user_session"]
    refresh_token = response.json()["refresh_token"]

    storage = dummy_storage(tmpdir)
    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        file = SimpleUploadedFile("test.txt", b"Hello World")
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )
        assert response.status_code == HTTP_200_OK, response.json()
        backend_file_url = response.json()["url"]

    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
            HTTP_COOKIE=f"user_session={cookie}",
        )

        assert response.status_code == HTTP_200_OK

    # If the user signs out, the cookie is invalidated and the url cannot be accessed
    expires_at = datetime_from_epoch(RefreshToken(refresh_token)["exp"])
    UserHandler().blacklist_refresh_token(refresh_token, expires_at)

    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
            HTTP_COOKIE=f"user_session={cookie}",
        )

        assert response.status_code == HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION="SIGNED_IN",
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_deactivate_user_prevents_file_download(
    enable_enterprise, enterprise_data_fixture, api_client, tmpdir
):
    """Deactivating the account invalidates the session cookie for downloads."""

    user = enterprise_data_fixture.create_user(password="password")

    # Login to generate the signed cookie we need to download files
    response = api_client.post(
        reverse("api:user:token_auth"),
        data={"email": user.email, "password": "password"},
        format="json",
    )
    assert response.status_code == HTTP_200_OK
    cookie = response.json()["user_session"]

    storage = dummy_storage(tmpdir)
    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        file = SimpleUploadedFile("test.txt", b"Hello World")
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )
        assert response.status_code == HTTP_200_OK, response.json()
        backend_file_url = response.json()["url"]

    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
            HTTP_COOKIE=f"user_session={cookie}",
        )

        assert response.status_code == HTTP_200_OK

    user.is_active = False
    user.save()

    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
            HTTP_COOKIE=f"user_session={cookie}",
        )

        assert response.status_code == HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND_EXPIRE_SECONDS=59,
    BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION="SIGNED_IN",
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_files_urls_can_expire_also_for_authenticated_users(
    enable_enterprise, enterprise_data_fixture, api_client, tmpdir
):
    """Expiry applies even when the request carries a valid session cookie."""

    user = enterprise_data_fixture.create_user(password="password")

    # Login to generate the signed cookie we need to download files
    response = api_client.post(
        reverse("api:user:token_auth"),
        data={"email": user.email, "password": "password"},
        format="json",
    )
    assert response.status_code == HTTP_200_OK
    cookie = response.json()["user_session"]

    storage = dummy_storage(tmpdir)
    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        with freeze_time("2024-01-01 12:00:00"):
            file = SimpleUploadedFile("test.txt", b"Hello World")
            token = enterprise_data_fixture.generate_token(user)
            response = api_client.post(
                reverse("api:user_files:upload_file"),
                data={"file": file},
                format="multipart",
                HTTP_AUTHORIZATION=f"JWT {token}",
            )

    assert response.status_code == HTTP_200_OK, response.json()
    backend_file_url = response.json()["url"]

    # without cookie the url cannot be accessed
    with (
        patch(
            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
        ),
        freeze_time("2024-01-01 12:00:59"),
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
        )

        assert response.status_code == HTTP_403_FORBIDDEN

    # with cookie the url can be accessed
    with (
        patch(
            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
        ),
        freeze_time("2024-01-01 12:00:59"),
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
            HTTP_COOKIE=f"user_session={cookie}",
        )

        assert response.status_code == HTTP_200_OK

    # after expiration the url cannot be accessed anymore, even with cookie
    with (
        patch(
            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
        ),
        freeze_time("2024-01-01 12:01:00"),
    ):
        token = enterprise_data_fixture.generate_token(user)
        response = api_client.get(
            backend_file_url.replace("http://localhost:8000", ""),
            HTTP_COOKIE=f"user_session={cookie}",
        )

        assert response.status_code == HTTP_403_FORBIDDEN
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_exporting_csv_writes_file_to_storage_and_its_served_by_the_backend(
    enable_enterprise,
    enterprise_data_fixture,
    api_client,
    tmpdir,
    django_capture_on_commit_callbacks,
):
    """Table CSV exports also get secure backend URLs and can be downloaded."""

    user = enterprise_data_fixture.create_user()
    table = enterprise_data_fixture.create_database_table(user=user)

    storage = dummy_storage(tmpdir)

    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
        token = enterprise_data_fixture.generate_token(user)
        with django_capture_on_commit_callbacks(execute=True):
            response = api_client.post(
                reverse(
                    "api:database:export:export_table",
                    kwargs={"table_id": table.id},
                ),
                data={
                    "exporter_type": "csv",
                    "export_charset": "utf-8",
                    "csv_include_header": "True",
                    "csv_column_separator": ",",
                },
                format="json",
                HTTP_AUTHORIZATION=f"JWT {token}",
            )
        response_json = response.json()
        job_id = response_json["id"]
        response = api_client.get(
            reverse("api:database:export:get", kwargs={"job_id": job_id}),
            format="json",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )
        json = response.json()

    # The file is served by the backend
    assert json["url"].startswith("http://localhost:8000/api/files/")

    # download it
    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        response = api_client.get(json["url"].replace("http://localhost:8000", ""))

    assert response.status_code == HTTP_200_OK
|
||||
|
||||
|
||||
@pytest.mark.django_db
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_audit_log_can_export_to_csv_and_be_served_by_the_backend(
    api_client,
    enterprise_data_fixture,
    synced_roles,
    django_capture_on_commit_callbacks,
    tmpdir,
):
    """Audit-log CSV exports are served through the secure backend endpoint."""

    (
        admin_user,
        admin_token,
    ) = enterprise_data_fixture.create_enterprise_admin_user_and_token()

    csv_settings = {
        "csv_column_separator": ",",
        "csv_first_row_header": True,
        "export_charset": "utf-8",
    }

    storage = dummy_storage(tmpdir)
    with patch("baserow.contrib.database.export.handler.default_storage", new=storage):
        with django_capture_on_commit_callbacks(execute=True):
            response = api_client.post(
                reverse("api:enterprise:audit_log:async_export"),
                data=csv_settings,
                format="json",
                HTTP_AUTHORIZATION=f"JWT {admin_token}",
            )
        assert response.status_code == HTTP_202_ACCEPTED, response.json()
        job = response.json()
        assert job["id"] is not None
        assert job["state"] == "pending"
        assert job["type"] == "audit_log_export"

        admin_token = enterprise_data_fixture.generate_token(admin_user)
        response = api_client.get(
            reverse(
                "api:jobs:item",
                kwargs={"job_id": job["id"]},
            ),
            HTTP_AUTHORIZATION=f"JWT {admin_token}",
        )
        assert response.status_code == HTTP_200_OK
        job = response.json()
        assert job["state"] == "finished"
        assert job["type"] == "audit_log_export"

    # The file is served by the backend
    assert job["url"].startswith("http://localhost:8000/api/files/")

    # download it
    with patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
    ):
        response = api_client.get(job["url"].replace("http://localhost:8000", ""))

    assert response.status_code == HTTP_200_OK
|
||||
|
||||
|
||||
@pytest.mark.django_db()
@override_settings(
    DEBUG=True,
    BASEROW_SERVE_FILES_THROUGH_BACKEND=True,
    STORAGES={
        "default": {
            "BACKEND": "baserow_enterprise.secure_file_serve.storage.EnterpriseFileStorage"
        }
    },
)
def test_files_can_be_downloaded_with_dl_query_param_as_filename(
    enable_enterprise, enterprise_data_fixture, api_client, tmpdir
):
    """The ``dl`` query parameter controls the Content-Disposition header.

    A missing or empty ``dl`` serves the file inline under its original name,
    while any non-empty value forces an attachment download named after it.
    """
    _, token = enterprise_data_fixture.create_user_and_token()

    storage = dummy_storage(tmpdir)
    # Upload a file while the user files handler writes to the dummy storage.
    with patch("baserow.core.user_files.handler.default_storage", new=storage):
        file = SimpleUploadedFile("test.txt", b"Hello World")
        response = api_client.post(
            reverse("api:user_files:upload_file"),
            data={"file": file},
            format="multipart",
            HTTP_AUTHORIZATION=f"JWT {token}",
        )

    assert response.status_code == HTTP_200_OK, response.json()
    backend_file_url = response.json()["url"]
    file_name = response.json()["name"]

    def download(query=""):
        # Fetch the secure file serve URL (optionally with a query string)
        # while the serve handler reads from the same dummy storage, and
        # return the Content-Disposition header of the successful response.
        with patch(
            "baserow_enterprise.secure_file_serve.handler.default_storage", new=storage
        ):
            response = api_client.get(
                backend_file_url.replace("http://localhost:8000", "") + query,
            )
        assert response.status_code == HTTP_200_OK
        return response.headers["Content-Disposition"]

    # No dl parameter and an empty dl parameter both serve the file inline.
    assert download() == f'inline; filename="{file_name}"'
    assert download("?dl=") == f'inline; filename="{file_name}"'
    # A non-empty dl value forces a download under the given name.
    assert download("?dl=download.txt") == 'attachment; filename="download.txt"'
    assert download("?dl=1") == 'attachment; filename="1"'
|
|
@ -0,0 +1,59 @@
|
|||
from django.conf import settings
|
||||
from django.core.signing import BadSignature
|
||||
|
||||
import pytest
|
||||
|
||||
from baserow_enterprise.secure_file_serve.storage import (
|
||||
EnterpriseFileStorage,
|
||||
SecureFileServeSignerPayload,
|
||||
workspace_id_context,
|
||||
)
|
||||
|
||||
|
||||
def test_enterprise_storage_sign_data():
    """Signing a name round-trips through unsign_data with no workspace id."""
    file_storage = EnterpriseFileStorage()
    for file_name in (None, "", "path/to/file.txt"):
        token = file_storage.sign_data(name=file_name)
        assert isinstance(token, str)
        decoded = file_storage.unsign_data(signed_data=token)
        assert isinstance(decoded, SecureFileServeSignerPayload)
        assert decoded.name == file_name
        assert decoded.workspace_id is None
|
||||
|
||||
|
||||
def test_enterprise_storage_sign_data_with_workspace_id():
    """Inside workspace_id_context the signed payload carries the workspace id."""
    file_storage = EnterpriseFileStorage()
    target = "path/to/file.txt"

    with workspace_id_context(workspace_id=1):
        token = file_storage.sign_data(name=target)
        assert isinstance(token, str)
        decoded = file_storage.unsign_data(signed_data=token)
        assert isinstance(decoded, SecureFileServeSignerPayload)
        assert decoded.name == target
        assert decoded.workspace_id == 1
|
||||
|
||||
|
||||
def test_enterprise_storage_unsign_data_with_invalid_payload():
    """Malformed signed payloads are rejected with BadSignature."""
    file_storage = EnterpriseFileStorage()

    for bad_token in (None, "", "invalid_payload"):
        with pytest.raises(BadSignature):
            file_storage.unsign_data(signed_data=bad_token)
|
||||
|
||||
|
||||
def test_enterprise_storage_get_signed_file_path():
    """get_signed_file_path returns a string for any name, even empty ones."""
    file_storage = EnterpriseFileStorage()
    for file_name in (None, "", "path/to/file.txt"):
        assert isinstance(file_storage.get_signed_file_path(name=file_name), str)
|
||||
|
||||
|
||||
def test_enterprise_storage_url():
    """url() always produces a string anchored at the public backend URL."""
    file_storage = EnterpriseFileStorage()
    for file_name in (None, "", "path/to/file.txt"):
        signed_url = file_storage.url(name=file_name)
        assert isinstance(signed_url, str)
        assert signed_url.startswith(settings.PUBLIC_BACKEND_URL)
|
|
@ -0,0 +1,140 @@
|
|||
from unittest import mock
|
||||
|
||||
from django.core.signing import SignatureExpired
|
||||
from django.test import override_settings
|
||||
|
||||
import pytest
|
||||
|
||||
from baserow_enterprise.secure_file_serve.constants import SecureFileServePermission
|
||||
from baserow_enterprise.secure_file_serve.exceptions import SecureFileServeException
|
||||
from baserow_enterprise.secure_file_serve.handler import (
|
||||
SecureFile,
|
||||
SecureFileServeHandler,
|
||||
)
|
||||
from baserow_enterprise.secure_file_serve.storage import (
|
||||
EnterpriseFileStorage,
|
||||
SecureFileServeSignerPayload,
|
||||
)
|
||||
|
||||
|
||||
def test_secure_file_handler_unsign_data_invalid_payload():
    """Every malformed signed path is rejected with 'Invalid signature'."""
    for bad_path in (None, "", "invalid_payload"):
        with pytest.raises(SecureFileServeException) as exc_info:
            SecureFileServeHandler().unsign_data(signed_path=bad_path)
        assert str(exc_info.value) == "Invalid signature"
|
||||
|
||||
|
||||
@mock.patch("baserow_enterprise.secure_file_serve.handler.EnterpriseFileStorage")
def test_secure_file_handler_unsign_data_expired_payload(mocked_storage):
    """An expired signature is surfaced as a 'File expired' serve exception."""
    # Make the storage used by the handler raise SignatureExpired on unsign.
    # NOTE(review): side_effect is set on the *class* mock's attribute; this
    # only fires if the handler calls unsign_data on the patched class itself
    # rather than on an instance (mocked_storage.return_value) — confirm
    # against SecureFileServeHandler's implementation.
    mocked_storage.unsign_data.side_effect = SignatureExpired()

    handler = SecureFileServeHandler()
    # Sign with a real (unpatched) storage so the input itself is well-formed.
    storage = EnterpriseFileStorage()
    signed_data = storage.sign_data(name="path/to/file.txt")

    with pytest.raises(SecureFileServeException) as error:
        handler.unsign_data(signed_path=signed_data)
    assert str(error.value) == "File expired"
|
||||
|
||||
|
||||
def test_secure_file_handler_unsign_valid_data():
    """A genuinely signed path unsigns into the original payload."""
    signed = EnterpriseFileStorage().sign_data(name="path/to/file.txt")

    result = SecureFileServeHandler().unsign_data(signed_path=signed)

    assert result == SecureFileServeSignerPayload(
        name="path/to/file.txt", workspace_id=None
    )
|
||||
|
||||
|
||||
def test_secure_file_handler_get_file_path_exists():
    """When the storage reports the file exists, its payload name is returned."""
    payload = SecureFileServeSignerPayload(name="path/to/file.txt", workspace_id=None)

    with mock.patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage"
    ) as storage_mock:
        storage_mock.exists.return_value = True
        assert SecureFileServeHandler().get_file_path(data=payload) == payload.name
|
||||
|
||||
|
||||
def test_secure_file_handler_get_file_path_does_not_exist():
    """A missing file in storage raises a 'File not found' serve exception."""
    payload = SecureFileServeSignerPayload(name="path/to/file.txt", workspace_id=None)

    with mock.patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage"
    ) as storage_mock:
        storage_mock.exists.return_value = False
        with pytest.raises(SecureFileServeException) as exc_info:
            SecureFileServeHandler().get_file_path(data=payload)
        assert str(exc_info.value) == "File not found"
|
||||
|
||||
|
||||
def test_secure_file_handler_get_file_name():
    """get_file_name returns the basename, and '' for empty/None paths."""
    handler = SecureFileServeHandler()

    assert handler.get_file_name(None) == ""
    assert handler.get_file_name("") == ""
    assert handler.get_file_name("path/to/file.txt") == "file.txt"
    assert handler.get_file_name("file.txt") == "file.txt"
|
||||
|
||||
|
||||
def test_secure_file_handler_extract_file_info_or_raise_invalid_payload():
    """An empty signed payload is rejected before any further processing."""
    with pytest.raises(SecureFileServeException) as exc_info:
        SecureFileServeHandler().extract_file_info_or_raise(user=None, signed_data="")
    assert str(exc_info.value) == "Invalid signature"
|
||||
|
||||
|
||||
@pytest.mark.django_db
@override_settings(
    BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION=SecureFileServePermission.DISABLED
)
def test_secure_file_handler_extract_file_info_or_raise_non_existing_file():
    """With permission checks disabled, a missing file raises 'File not found'."""
    handler = SecureFileServeHandler()

    with mock.patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage"
    ) as mocked_default_storage:
        # Storage reports the file as missing.
        mocked_default_storage.exists.return_value = False

        # NOTE(review): signed_data="" is an *invalid* signature — judging by
        # the sibling test above it should raise "Invalid signature" before
        # the storage existence check is ever reached; confirm the expected
        # message below against the handler's implementation.
        with pytest.raises(SecureFileServeException) as error:
            handler.extract_file_info_or_raise(user=None, signed_data="")
        assert str(error.value) == "File not found"
|
||||
|
||||
|
||||
@pytest.mark.django_db
@override_settings(
    BASEROW_SERVE_FILES_THROUGH_BACKEND_PERMISSION=SecureFileServePermission.DISABLED
)
def test_secure_file_handler_extract_file_info_or_raise_valid_data():
    """A well-signed, existing path yields a SecureFile with name and path."""
    signed = EnterpriseFileStorage().sign_data(name="path/to/file.txt")

    with mock.patch(
        "baserow_enterprise.secure_file_serve.handler.default_storage"
    ) as storage_mock:
        storage_mock.exists.return_value = True

        result = SecureFileServeHandler().extract_file_info_or_raise(
            user=None, signed_data=signed
        )
        assert isinstance(result, SecureFile)
        assert result.name == "file.txt"
        assert result.path == "path/to/file.txt"
|
|
@ -4,7 +4,12 @@ import _ from 'lodash'
|
|||
|
||||
import AuthService from '@baserow/modules/core/services/auth'
|
||||
import WorkspaceService from '@baserow/modules/core/services/workspace'
|
||||
import { setToken, unsetToken } from '@baserow/modules/core/utils/auth'
|
||||
import {
|
||||
setToken,
|
||||
setUserSessionCookie,
|
||||
unsetToken,
|
||||
unsetUserSessionCookie,
|
||||
} from '@baserow/modules/core/utils/auth'
|
||||
import { unsetWorkspaceCookie } from '@baserow/modules/core/utils/workspace'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
|
||||
|
@ -17,6 +22,7 @@ export const state = () => ({
|
|||
refreshTokenPayload: null,
|
||||
permissions: [],
|
||||
user: null,
|
||||
signedUserSession: null,
|
||||
authenticated: false,
|
||||
additional: {},
|
||||
webSocketId: null,
|
||||
|
@ -36,6 +42,7 @@ export const mutations = {
|
|||
{
|
||||
access_token,
|
||||
refresh_token,
|
||||
user_session,
|
||||
user,
|
||||
permissions,
|
||||
tokenUpdatedAt,
|
||||
|
@ -45,6 +52,7 @@ export const mutations = {
|
|||
state.token = access_token
|
||||
state.refreshToken = refresh_token
|
||||
state.tokenUpdatedAt = tokenUpdatedAt || new Date().getTime()
|
||||
state.signedUserSession = user_session
|
||||
state.tokenPayload = jwtDecode(state.token)
|
||||
if (state.refreshToken) {
|
||||
state.refreshTokenPayload = jwtDecode(state.refreshToken)
|
||||
|
@ -136,6 +144,7 @@ export const actions = {
|
|||
|
||||
if (!getters.getPreventSetToken) {
|
||||
setToken(this.app, getters.refreshToken)
|
||||
setUserSessionCookie(this.app, getters.signedUserSession)
|
||||
}
|
||||
return data.user
|
||||
},
|
||||
|
@ -166,6 +175,7 @@ export const actions = {
|
|||
|
||||
if (data.refresh_token) {
|
||||
setToken(this.app, data.refresh_token)
|
||||
setUserSessionCookie(this.app, data.user_session)
|
||||
dispatch('setUserData', data)
|
||||
}
|
||||
},
|
||||
|
@ -187,6 +197,7 @@ export const actions = {
|
|||
},
|
||||
forceLogoff({ commit }) {
|
||||
unsetToken(this.app)
|
||||
unsetUserSessionCookie(this.app)
|
||||
unsetWorkspaceCookie(this.app)
|
||||
commit('LOGOFF')
|
||||
},
|
||||
|
@ -227,6 +238,7 @@ export const actions = {
|
|||
} catch (error) {
|
||||
if (error.response?.status === 401) {
|
||||
unsetToken(this.app)
|
||||
unsetUserSessionCookie(this.app)
|
||||
unsetWorkspaceCookie(this.app)
|
||||
if (getters.isAuthenticated) {
|
||||
dispatch('setUserSessionExpired', true)
|
||||
|
@ -283,6 +295,7 @@ export const actions = {
|
|||
},
|
||||
setUserSessionExpired({ commit }, value) {
|
||||
unsetToken(this.app)
|
||||
unsetUserSessionCookie(this.app)
|
||||
unsetWorkspaceCookie(this.app)
|
||||
commit('SET_USER_SESSION_EXPIRED', value)
|
||||
},
|
||||
|
@ -329,6 +342,9 @@ export const getters = {
|
|||
refreshTokenPayload(state) {
|
||||
return state.refreshTokenPayload
|
||||
},
|
||||
signedUserSession(state) {
|
||||
return state.signedUserSession
|
||||
},
|
||||
webSocketId(state) {
|
||||
return state.webSocketId
|
||||
},
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
import { isSecureURL } from '@baserow/modules/core/utils/string'
|
||||
import jwtDecode from 'jwt-decode'
|
||||
import { getDomain } from 'tldjs'
|
||||
|
||||
const cookieTokenName = 'jwt_token'
|
||||
export const userSourceCookieTokenName = 'user_source_token'
|
||||
export const userSessionCookieName = 'user_session'
|
||||
const refreshTokenMaxAge = 60 * 60 * 24 * 7
|
||||
|
||||
export const setToken = (
|
||||
{ $config, $cookies },
|
||||
|
@ -14,18 +17,67 @@ export const setToken = (
|
|||
const secure = isSecureURL($config.PUBLIC_WEB_FRONTEND_URL)
|
||||
$cookies.set(key, token, {
|
||||
path: '/',
|
||||
maxAge: 60 * 60 * 24 * 7,
|
||||
maxAge: refreshTokenMaxAge,
|
||||
sameSite:
|
||||
configuration.sameSite || $config.BASEROW_FRONTEND_SAME_SITE_COOKIE,
|
||||
secure,
|
||||
})
|
||||
}
|
||||
|
||||
/**
 * Stores the user's signed session payload in a browser cookie right after
 * login. The backend reads this cookie to authenticate plain GET requests —
 * e.g. downloads through the secure_file_serve feature — where no
 * Authorization header can be attached. The payload embeds a token hash so
 * the cookie becomes invalid once the user logs out.
 *
 * @param {*} app: the nuxt app instance
 * @param {*} signedUserSession: the signed user session payload to be stored in the
 *  cookie
 * @param {*} key: the cookie name
 * @param {*} configuration: the configuration object with the sameSite key
 * @returns
 */
export const setUserSessionCookie = (
  { $config, $cookies },
  signedUserSession,
  key = userSessionCookieName,
  configuration = { sameSite: null }
) => {
  if (process.SERVER_BUILD) return

  // Scope the cookie to the top-level domain (extracted from the backend URL)
  // so every subdomain receives it. This matters when the backend serves
  // files from a different subdomain than the frontend. Browsers forbid
  // setting a cookie for an unrelated domain, so frontend and backend must
  // share a top-level domain for this to work at all.
  const cookieOptions = {
    path: '/',
    maxAge: refreshTokenMaxAge,
    sameSite:
      configuration.sameSite || $config.BASEROW_FRONTEND_SAME_SITE_COOKIE,
    secure: isSecureURL($config.PUBLIC_WEB_FRONTEND_URL),
    domain: getDomain($config.PUBLIC_BACKEND_URL),
  }
  $cookies.set(key, signedUserSession, cookieOptions)
}
|
||||
|
||||
/**
 * Removes the refresh token cookie. No-op during the server build, where no
 * browser cookie jar exists.
 */
export const unsetToken = ({ $cookies }, key = cookieTokenName) => {
  if (process.SERVER_BUILD) return
  $cookies.remove(key)
}
|
||||
|
||||
/**
 * Removes the signed user session cookie (set by setUserSessionCookie) on
 * logout or session expiry. No-op during the server build.
 */
export const unsetUserSessionCookie = (
  { $cookies },
  key = userSessionCookieName
) => {
  if (process.SERVER_BUILD) return
  $cookies.remove(key)
}
|
||||
|
||||
/**
 * Returns the refresh token stored in the cookie, or undefined if not set.
 */
export const getToken = ({ $cookies }, key = cookieTokenName) => {
  return $cookies.get(key)
}
|
||||
|
|
|
@ -94,6 +94,7 @@
|
|||
"sass-loader": "10.4.1",
|
||||
"thenby": "^1.3.4",
|
||||
"tiptap-markdown": "^0.8.9",
|
||||
"tldjs": "^2.3.1",
|
||||
"uuid": "9.0.0",
|
||||
"vue-chartjs": "4.1.2",
|
||||
"vue2-smooth-scroll": "^1.6.0",
|
||||
|
|
|
@ -18001,6 +18001,13 @@ tiptap-markdown@^0.8.9:
|
|||
markdown-it-task-lists "^2.1.1"
|
||||
prosemirror-markdown "^1.11.1"
|
||||
|
||||
tldjs@^2.3.1:
|
||||
version "2.3.1"
|
||||
resolved "https://registry.yarnpkg.com/tldjs/-/tldjs-2.3.1.tgz#cf09c3eb5d7403a9e214b7d65f3cf9651c0ab039"
|
||||
integrity sha512-W/YVH/QczLUxVjnQhFC61Iq232NWu3TqDdO0S/MtXVz4xybejBov4ud+CIwN9aYqjOecEqIy0PscGkwpG9ZyTw==
|
||||
dependencies:
|
||||
punycode "^1.4.1"
|
||||
|
||||
tmp@^0.0.33:
|
||||
version "0.0.33"
|
||||
resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue