s3_storage_url}\">Check S3 Configuration";
+ }
+
+ return new PushoverMessage(
+ title: 'Database backup succeeded locally, S3 upload failed',
+ level: 'warning',
+ message: $message,
+ );
+ }
+
+ public function toSlack(): SlackMessage
+ {
+ $title = 'Database backup succeeded locally, S3 upload failed';
+ $description = "Database backup for {$this->name} (db:{$this->database_name}) was created successfully on local storage, but failed to upload to S3.";
+
+ $description .= "\n\n*Frequency:* {$this->frequency}";
+ $description .= "\n\n*S3 Error:* {$this->s3_error}";
+
+ if ($this->s3_storage_url) {
+ $description .= "\n\n*S3 Storage:* <{$this->s3_storage_url}|Check Configuration>";
+ }
+
+ return new SlackMessage(
+ title: $title,
+ description: $description,
+ color: SlackMessage::warningColor()
+ );
+ }
+}
diff --git a/app/Rules/DockerImageFormat.php b/app/Rules/DockerImageFormat.php
new file mode 100644
index 000000000..a6a78a76c
--- /dev/null
+++ b/app/Rules/DockerImageFormat.php
@@ -0,0 +1,41 @@
+', '\n', '\r', '\0', '"', "'",
- '\\', '!', '?', '*', '~', '^', '%', '=', '+',
+ '\\', '!', '?', '*', '^', '%', '=', '+',
'#', // Comment character that could hide commands
];
@@ -85,7 +85,7 @@ public function validate(string $attribute, mixed $value, Closure $fail): void
}
// Validate SSH URL format (git@host:user/repo.git)
- if (! preg_match('/^git@[a-zA-Z0-9\.\-]+:[a-zA-Z0-9\-_\/\.]+$/', $value)) {
+ if (! preg_match('/^git@[a-zA-Z0-9\.\-]+:[a-zA-Z0-9\-_\/\.~]+$/', $value)) {
$fail('The :attribute is not a valid SSH repository URL.');
return;
@@ -136,14 +136,14 @@ public function validate(string $attribute, mixed $value, Closure $fail): void
// Validate path contains only safe characters
$path = $parsed['path'] ?? '';
- if (! empty($path) && ! preg_match('/^[a-zA-Z0-9\-_\/\.]+$/', $path)) {
+ if (! empty($path) && ! preg_match('/^[a-zA-Z0-9\-_\/\.@~]+$/', $path)) {
$fail('The :attribute path contains invalid characters.');
return;
}
} elseif (str_starts_with($value, 'git://')) {
- // Validate git:// protocol URL
- if (! preg_match('/^git:\/\/[a-zA-Z0-9\.\-]+\/[a-zA-Z0-9\-_\/\.]+$/', $value)) {
+ // Validate git:// protocol URL (supports both git://host/path and git://host:port/path with tilde)
+ if (! preg_match('/^git:\/\/[a-zA-Z0-9\.\-]+(:[0-9]+)?[:\/][a-zA-Z0-9\-_\/\.~]+$/', $value)) {
$fail('The :attribute is not a valid git:// URL.');
return;
diff --git a/app/Services/DockerImageParser.php b/app/Services/DockerImageParser.php
index 1fd6625b3..b483c979a 100644
--- a/app/Services/DockerImageParser.php
+++ b/app/Services/DockerImageParser.php
@@ -10,20 +10,33 @@ class DockerImageParser
private string $tag = 'latest';
+ private bool $isImageHash = false;
+
public function parse(string $imageString): self
{
- // First split by : to handle the tag, but be careful with registry ports
- $lastColon = strrpos($imageString, ':');
- $hasSlash = str_contains($imageString, '/');
-
- // If the last colon appears after the last slash, it's a tag
- // Otherwise it might be a port in the registry URL
- if ($lastColon !== false && (! $hasSlash || $lastColon > strrpos($imageString, '/'))) {
- $mainPart = substr($imageString, 0, $lastColon);
- $this->tag = substr($imageString, $lastColon + 1);
+ // Check for @sha256: format first (e.g., nginx@sha256:abc123...)
+ if (preg_match('/^(.+)@sha256:([a-f0-9]{64})$/i', $imageString, $matches)) {
+ $mainPart = $matches[1];
+ $this->tag = $matches[2];
+ $this->isImageHash = true;
} else {
- $mainPart = $imageString;
- $this->tag = 'latest';
+ // Split by : to handle the tag, but be careful with registry ports
+ $lastColon = strrpos($imageString, ':');
+ $hasSlash = str_contains($imageString, '/');
+
+ // If the last colon appears after the last slash, it's a tag
+ // Otherwise it might be a port in the registry URL
+ if ($lastColon !== false && (! $hasSlash || $lastColon > strrpos($imageString, '/'))) {
+ $mainPart = substr($imageString, 0, $lastColon);
+ $this->tag = substr($imageString, $lastColon + 1);
+
+ // Check if the tag is a SHA256 hash
+ $this->isImageHash = $this->isSha256Hash($this->tag);
+ } else {
+ $mainPart = $imageString;
+ $this->tag = 'latest';
+ $this->isImageHash = false;
+ }
}
// Split the main part by / to handle registry and image name
@@ -41,6 +54,37 @@ public function parse(string $imageString): self
return $this;
}
+ /**
+ * Check if the given string is a SHA256 hash
+ */
+ private function isSha256Hash(string $hash): bool
+ {
+ // SHA256 hashes are 64 characters long and contain only hexadecimal characters
+ return preg_match('/^[a-f0-9]{64}$/i', $hash) === 1;
+ }
+
+ /**
+ * Check if the current tag is an image hash
+ */
+ public function isImageHash(): bool
+ {
+ return $this->isImageHash;
+ }
+
+ /**
+ * Get the full image name with hash if present
+ */
+ public function getFullImageNameWithHash(): string
+ {
+ $imageName = $this->getFullImageNameWithoutTag();
+
+ if ($this->isImageHash) {
+ return $imageName.'@sha256:'.$this->tag;
+ }
+
+ return $imageName.':'.$this->tag;
+ }
+
public function getFullImageNameWithoutTag(): string
{
if ($this->registryUrl) {
@@ -73,6 +117,10 @@ public function toString(): string
}
$parts[] = $this->imageName;
+ if ($this->isImageHash) {
+ return implode('/', $parts).'@sha256:'.$this->tag;
+ }
+
return implode('/', $parts).':'.$this->tag;
}
}
diff --git a/app/Traits/ClearsGlobalSearchCache.php b/app/Traits/ClearsGlobalSearchCache.php
index 0bcc5d319..b9af70aba 100644
--- a/app/Traits/ClearsGlobalSearchCache.php
+++ b/app/Traits/ClearsGlobalSearchCache.php
@@ -3,79 +3,126 @@
namespace App\Traits;
use App\Livewire\GlobalSearch;
+use Illuminate\Database\Eloquent\Model;
trait ClearsGlobalSearchCache
{
protected static function bootClearsGlobalSearchCache()
{
static::saving(function ($model) {
- // Only clear cache if searchable fields are being changed
- if ($model->hasSearchableChanges()) {
- $teamId = $model->getTeamIdForCache();
- if (filled($teamId)) {
- GlobalSearch::clearTeamCache($teamId);
+ try {
+ // Only clear cache if searchable fields are being changed
+ if ($model->hasSearchableChanges()) {
+ $teamId = $model->getTeamIdForCache();
+ if (filled($teamId)) {
+ GlobalSearch::clearTeamCache($teamId);
+ }
}
+ } catch (\Throwable $e) {
+                // Fail gracefully (logged via ray) - cache clearing must not break the save operation
+ ray('Failed to clear global search cache on saving: '.$e->getMessage());
}
});
static::created(function ($model) {
- // Always clear cache when model is created
- $teamId = $model->getTeamIdForCache();
- if (filled($teamId)) {
- GlobalSearch::clearTeamCache($teamId);
+ try {
+ // Always clear cache when model is created
+ $teamId = $model->getTeamIdForCache();
+ if (filled($teamId)) {
+ GlobalSearch::clearTeamCache($teamId);
+ }
+ } catch (\Throwable $e) {
+                // Fail gracefully (logged via ray) - cache clearing must not break the create operation
+ ray('Failed to clear global search cache on creation: '.$e->getMessage());
}
});
static::deleted(function ($model) {
- // Always clear cache when model is deleted
- $teamId = $model->getTeamIdForCache();
- if (filled($teamId)) {
- GlobalSearch::clearTeamCache($teamId);
+ try {
+ // Always clear cache when model is deleted
+ $teamId = $model->getTeamIdForCache();
+ if (filled($teamId)) {
+ GlobalSearch::clearTeamCache($teamId);
+ }
+ } catch (\Throwable $e) {
+                // Fail gracefully (logged via ray) - cache clearing must not break the delete operation
+ ray('Failed to clear global search cache on deletion: '.$e->getMessage());
}
});
}
private function hasSearchableChanges(): bool
{
- // Define searchable fields based on model type
- $searchableFields = ['name', 'description'];
+ try {
+ // Define searchable fields based on model type
+ $searchableFields = ['name', 'description'];
- // Add model-specific searchable fields
- if ($this instanceof \App\Models\Application) {
- $searchableFields[] = 'fqdn';
- $searchableFields[] = 'docker_compose_domains';
- } elseif ($this instanceof \App\Models\Server) {
- $searchableFields[] = 'ip';
- } elseif ($this instanceof \App\Models\Service) {
- // Services don't have direct fqdn, but name and description are covered
- }
- // Database models only have name and description as searchable
-
- // Check if any searchable field is dirty
- foreach ($searchableFields as $field) {
- if ($this->isDirty($field)) {
- return true;
+ // Add model-specific searchable fields
+ if ($this instanceof \App\Models\Application) {
+ $searchableFields[] = 'fqdn';
+ $searchableFields[] = 'docker_compose_domains';
+ } elseif ($this instanceof \App\Models\Server) {
+ $searchableFields[] = 'ip';
+ } elseif ($this instanceof \App\Models\Service) {
+ // Services don't have direct fqdn, but name and description are covered
+ } elseif ($this instanceof \App\Models\Project || $this instanceof \App\Models\Environment) {
+ // Projects and environments only have name and description as searchable
}
- }
+ // Database models only have name and description as searchable
- return false;
+ // Check if any searchable field is dirty
+ foreach ($searchableFields as $field) {
+ // Check if attribute exists before checking if dirty
+ if (array_key_exists($field, $this->getAttributes()) && $this->isDirty($field)) {
+ return true;
+ }
+ }
+
+ return false;
+ } catch (\Throwable $e) {
+ // If checking changes fails, assume changes exist to be safe
+ ray('Failed to check searchable changes: '.$e->getMessage());
+
+ return true;
+ }
}
private function getTeamIdForCache()
{
- // For database models, team is accessed through environment.project.team
- if (method_exists($this, 'team')) {
- $team = $this->team();
- if (filled($team)) {
- return is_object($team) ? $team->id : null;
+ try {
+ // For Project models (has direct team_id)
+ if ($this instanceof \App\Models\Project) {
+ return $this->team_id ?? null;
}
- }
- // For models with direct team_id property
- if (property_exists($this, 'team_id') || isset($this->team_id)) {
- return $this->team_id;
- }
+ // For Environment models (get team_id through project)
+ if ($this instanceof \App\Models\Environment) {
+ return $this->project?->team_id;
+ }
- return null;
+ // For database models, team is accessed through environment.project.team
+ if (method_exists($this, 'team')) {
+ if ($this instanceof \App\Models\Server) {
+ $team = $this->team;
+ } else {
+ $team = $this->team();
+ }
+ if (filled($team)) {
+ return is_object($team) ? $team->id : null;
+ }
+ }
+
+ // For models with direct team_id property
+ if (property_exists($this, 'team_id') || isset($this->team_id)) {
+ return $this->team_id ?? null;
+ }
+
+ return null;
+ } catch (\Throwable $e) {
+ // If we can't determine team ID, return null
+ ray('Failed to get team ID for cache: '.$e->getMessage());
+
+ return null;
+ }
}
}
diff --git a/app/Traits/EnvironmentVariableAnalyzer.php b/app/Traits/EnvironmentVariableAnalyzer.php
new file mode 100644
index 000000000..0b452a940
--- /dev/null
+++ b/app/Traits/EnvironmentVariableAnalyzer.php
@@ -0,0 +1,221 @@
+ [
+ 'problematic_values' => ['production', 'prod'],
+ 'affects' => 'Node.js/npm/yarn/bun/pnpm',
+ 'issue' => 'Skips devDependencies installation which are often required for building (webpack, typescript, etc.)',
+ 'recommendation' => 'Uncheck "Available at Buildtime" or use "development" during build',
+ ],
+ 'NPM_CONFIG_PRODUCTION' => [
+ 'problematic_values' => ['true', '1', 'yes'],
+ 'affects' => 'npm/pnpm',
+ 'issue' => 'Forces npm to skip devDependencies',
+ 'recommendation' => 'Remove from build-time variables or set to false',
+ ],
+ 'YARN_PRODUCTION' => [
+ 'problematic_values' => ['true', '1', 'yes'],
+ 'affects' => 'Yarn/pnpm',
+ 'issue' => 'Forces yarn to skip devDependencies',
+ 'recommendation' => 'Remove from build-time variables or set to false',
+ ],
+ 'COMPOSER_NO_DEV' => [
+ 'problematic_values' => ['1', 'true', 'yes'],
+ 'affects' => 'PHP/Composer',
+ 'issue' => 'Skips require-dev packages which may include build tools',
+ 'recommendation' => 'Set as "Runtime only" or remove from build-time variables',
+ ],
+ 'MIX_ENV' => [
+ 'problematic_values' => ['prod', 'production'],
+ 'affects' => 'Elixir/Phoenix',
+ 'issue' => 'Production mode may skip development dependencies needed for compilation',
+ 'recommendation' => 'Use "dev" for build or set as "Runtime only"',
+ ],
+ 'RAILS_ENV' => [
+ 'problematic_values' => ['production'],
+ 'affects' => 'Ruby on Rails',
+ 'issue' => 'May affect asset precompilation and dependency handling',
+ 'recommendation' => 'Consider using "development" for build phase',
+ ],
+ 'RACK_ENV' => [
+ 'problematic_values' => ['production'],
+ 'affects' => 'Ruby/Rack',
+ 'issue' => 'May affect dependency handling and build behavior',
+ 'recommendation' => 'Consider using "development" for build phase',
+ ],
+ 'BUNDLE_WITHOUT' => [
+ 'problematic_values' => ['development', 'test', 'development:test'],
+ 'affects' => 'Ruby/Bundler',
+ 'issue' => 'Excludes gem groups that may contain build dependencies',
+ 'recommendation' => 'Remove from build-time variables or adjust groups',
+ ],
+ 'FLASK_ENV' => [
+ 'problematic_values' => ['production'],
+ 'affects' => 'Python/Flask',
+ 'issue' => 'May affect debug mode and development tools availability',
+ 'recommendation' => 'Usually safe, but consider "development" for complex builds',
+ ],
+ 'DJANGO_SETTINGS_MODULE' => [
+ 'problematic_values' => [], // Check if contains 'production' or 'prod'
+ 'affects' => 'Python/Django',
+ 'issue' => 'Production settings may disable debug tools needed during build',
+ 'recommendation' => 'Use development settings for build phase',
+ 'check_function' => 'checkDjangoSettings',
+ ],
+ 'APP_ENV' => [
+ 'problematic_values' => ['production', 'prod'],
+ 'affects' => 'Laravel/Symfony',
+ 'issue' => 'May affect dependency installation and build optimizations',
+ 'recommendation' => 'Consider using "local" or "development" for build',
+ ],
+ 'ASPNETCORE_ENVIRONMENT' => [
+ 'problematic_values' => ['Production'],
+ 'affects' => '.NET/ASP.NET Core',
+ 'issue' => 'May affect build-time configurations and optimizations',
+ 'recommendation' => 'Usually safe, but verify build requirements',
+ ],
+ 'CI' => [
+ 'problematic_values' => ['true', '1', 'yes'],
+ 'affects' => 'Various tools',
+ 'issue' => 'Changes behavior in many tools (disables interactivity, changes caching)',
+ 'recommendation' => 'Usually beneficial for builds, but be aware of behavior changes',
+ ],
+ ];
+ }
+
+ /**
+ * Analyze an environment variable for potential build issues.
+ * Always returns a warning if the key is in our list, regardless of value.
+ */
+ public static function analyzeBuildVariable(string $key, string $value): ?array
+ {
+ $problematicVars = self::getProblematicBuildVariables();
+
+ // Direct key match
+ if (isset($problematicVars[$key])) {
+ $config = $problematicVars[$key];
+
+ // Check if it has a custom check function
+ if (isset($config['check_function'])) {
+ $method = $config['check_function'];
+ if (method_exists(self::class, $method)) {
+ return self::{$method}($key, $value, $config);
+ }
+ }
+
+ // Always return warning for known problematic variables
+ return [
+ 'variable' => $key,
+ 'value' => $value,
+ 'affects' => $config['affects'],
+ 'issue' => $config['issue'],
+ 'recommendation' => $config['recommendation'],
+ ];
+ }
+
+ return null;
+ }
+
+ /**
+ * Analyze multiple environment variables for potential build issues.
+ */
+ public static function analyzeBuildVariables(array $variables): array
+ {
+ $warnings = [];
+
+ foreach ($variables as $key => $value) {
+ $warning = self::analyzeBuildVariable($key, $value);
+ if ($warning) {
+ $warnings[] = $warning;
+ }
+ }
+
+ return $warnings;
+ }
+
+ /**
+ * Custom check for Django settings module.
+ */
+ protected static function checkDjangoSettings(string $key, string $value, array $config): ?array
+ {
+ // Always return warning for DJANGO_SETTINGS_MODULE when it's set as build-time
+ return [
+ 'variable' => $key,
+ 'value' => $value,
+ 'affects' => $config['affects'],
+ 'issue' => $config['issue'],
+ 'recommendation' => $config['recommendation'],
+ ];
+ }
+
+ /**
+ * Generate a formatted warning message for deployment logs.
+ */
+ public static function formatBuildWarning(array $warning): array
+ {
+ $messages = [
+ "⚠️ Build-time environment variable warning: {$warning['variable']}={$warning['value']}",
+ " Affects: {$warning['affects']}",
+ " Issue: {$warning['issue']}",
+ " Recommendation: {$warning['recommendation']}",
+ ];
+
+ return $messages;
+ }
+
+ /**
+ * Check if a variable should show a warning in the UI.
+ */
+ public static function shouldShowBuildWarning(string $key): bool
+ {
+ return isset(self::getProblematicBuildVariables()[$key]);
+ }
+
+ /**
+ * Get UI warning message for a specific variable.
+ */
+ public static function getUIWarningMessage(string $key): ?string
+ {
+ $problematicVars = self::getProblematicBuildVariables();
+
+ if (! isset($problematicVars[$key])) {
+ return null;
+ }
+
+ $config = $problematicVars[$key];
+ $problematicValuesStr = implode(', ', $config['problematic_values']);
+
+ return "Setting {$key} to {$problematicValuesStr} as a build-time variable may cause issues. {$config['issue']} Consider: {$config['recommendation']}";
+ }
+
+ /**
+ * Get problematic variables configuration for frontend use.
+ */
+ public static function getProblematicVariablesForFrontend(): array
+ {
+ $vars = self::getProblematicBuildVariables();
+ $result = [];
+
+ foreach ($vars as $key => $config) {
+ // Skip the check_function as it's PHP-specific
+ $result[$key] = [
+ 'problematic_values' => $config['problematic_values'],
+ 'affects' => $config['affects'],
+ 'issue' => $config['issue'],
+ 'recommendation' => $config['recommendation'],
+ ];
+ }
+
+ return $result;
+ }
+}
diff --git a/app/Traits/ExecuteRemoteCommand.php b/app/Traits/ExecuteRemoteCommand.php
index 0c3414efe..4aa5aae8b 100644
--- a/app/Traits/ExecuteRemoteCommand.php
+++ b/app/Traits/ExecuteRemoteCommand.php
@@ -17,6 +17,46 @@ trait ExecuteRemoteCommand
public static int $batch_counter = 0;
+ private function redact_sensitive_info($text)
+ {
+ $text = remove_iip($text);
+
+ if (! isset($this->application)) {
+ return $text;
+ }
+
+ $lockedVars = collect([]);
+
+ if (isset($this->application->environment_variables)) {
+ $lockedVars = $lockedVars->merge(
+ $this->application->environment_variables
+ ->where('is_shown_once', true)
+ ->pluck('real_value', 'key')
+ ->filter()
+ );
+ }
+
+ if (isset($this->pull_request_id) && $this->pull_request_id !== 0 && isset($this->application->environment_variables_preview)) {
+ $lockedVars = $lockedVars->merge(
+ $this->application->environment_variables_preview
+ ->where('is_shown_once', true)
+ ->pluck('real_value', 'key')
+ ->filter()
+ );
+ }
+
+ foreach ($lockedVars as $key => $value) {
+ $escapedValue = preg_quote($value, '/');
+ $text = preg_replace(
+ '/'.$escapedValue.'/',
+ REDACTED,
+ $text
+ );
+ }
+
+ return $text;
+ }
+
public function execute_remote_command(...$commands)
{
static::$batch_counter++;
@@ -74,7 +114,7 @@ public function execute_remote_command(...$commands)
// Track SSH retry event in Sentry
$this->trackSshRetryEvent($attempt, $maxRetries, $delay, $errorMessage, [
'server' => $this->server->name ?? $this->server->ip ?? 'unknown',
- 'command' => remove_iip($command),
+ 'command' => $this->redact_sensitive_info($command),
'trait' => 'ExecuteRemoteCommand',
]);
@@ -115,7 +155,7 @@ public function execute_remote_command(...$commands)
private function executeCommandWithProcess($command, $hidden, $customType, $append, $ignore_errors)
{
$remote_command = SshMultiplexingHelper::generateSshCommand($this->server, $command);
- $process = Process::timeout(3600)->idleTimeout(3600)->start($remote_command, function (string $type, string $output) use ($command, $hidden, $customType, $append) {
+ $process = Process::timeout(config('constants.ssh.command_timeout'))->idleTimeout(3600)->start($remote_command, function (string $type, string $output) use ($command, $hidden, $customType, $append) {
$output = str($output)->trim();
if ($output->startsWith('╔')) {
$output = "\n".$output;
@@ -125,8 +165,8 @@ private function executeCommandWithProcess($command, $hidden, $customType, $appe
$sanitized_output = sanitize_utf8_text($output);
$new_log_entry = [
- 'command' => remove_iip($command),
- 'output' => remove_iip($sanitized_output),
+ 'command' => $this->redact_sensitive_info($command),
+ 'output' => $this->redact_sensitive_info($sanitized_output),
'type' => $customType ?? $type === 'err' ? 'stderr' : 'stdout',
'timestamp' => Carbon::now('UTC'),
'hidden' => $hidden,
@@ -162,13 +202,13 @@ private function executeCommandWithProcess($command, $hidden, $customType, $appe
if ($this->save) {
if (data_get($this->saved_outputs, $this->save, null) === null) {
- data_set($this->saved_outputs, $this->save, str());
+ $this->saved_outputs->put($this->save, str());
}
if ($append) {
- $this->saved_outputs[$this->save] .= str($sanitized_output)->trim();
- $this->saved_outputs[$this->save] = str($this->saved_outputs[$this->save]);
+ $current_value = $this->saved_outputs->get($this->save);
+ $this->saved_outputs->put($this->save, str($current_value.str($sanitized_output)->trim()));
} else {
- $this->saved_outputs[$this->save] = str($sanitized_output)->trim();
+ $this->saved_outputs->put($this->save, str($sanitized_output)->trim());
}
}
});
@@ -194,7 +234,7 @@ private function addRetryLogEntry(int $attempt, int $maxRetries, int $delay, str
$retryMessage = "SSH connection failed. Retrying... (Attempt {$attempt}/{$maxRetries}, waiting {$delay}s)\nError: {$errorMessage}";
$new_log_entry = [
- 'output' => remove_iip($retryMessage),
+ 'output' => $this->redact_sensitive_info($retryMessage),
'type' => 'stdout',
'timestamp' => Carbon::now('UTC'),
'hidden' => false,
diff --git a/bootstrap/helpers/constants.php b/bootstrap/helpers/constants.php
index b568e090c..36243e119 100644
--- a/bootstrap/helpers/constants.php
+++ b/bootstrap/helpers/constants.php
@@ -21,13 +21,23 @@
'bitnami/mariadb',
'bitnami/mongodb',
'bitnami/redis',
+ 'bitnamilegacy/mariadb',
+ 'bitnamilegacy/mongodb',
+ 'bitnamilegacy/redis',
+ 'bitnamisecure/mariadb',
+ 'bitnamisecure/mongodb',
+ 'bitnamisecure/redis',
'mysql',
'bitnami/mysql',
+ 'bitnamilegacy/mysql',
+ 'bitnamisecure/mysql',
'mysql/mysql-server',
'mariadb',
'postgis/postgis',
'postgres',
'bitnami/postgresql',
+ 'bitnamilegacy/postgresql',
+ 'bitnamisecure/postgresql',
'supabase/postgres',
'elestio/postgres',
'mongo',
diff --git a/bootstrap/helpers/databases.php b/bootstrap/helpers/databases.php
index 5dbd46b5e..aa7be3236 100644
--- a/bootstrap/helpers/databases.php
+++ b/bootstrap/helpers/databases.php
@@ -237,12 +237,11 @@ function removeOldBackups($backup): void
{
try {
if ($backup->executions) {
- // If local backup is disabled, mark all executions as having local storage deleted
- if ($backup->disable_local_backup && $backup->save_s3) {
- $backup->executions()
- ->where('local_storage_deleted', false)
- ->update(['local_storage_deleted' => true]);
- } else {
+ // Delete old local backups (only if local backup is NOT disabled)
+ // Note: When disable_local_backup is enabled, each execution already marks its own
+ // local_storage_deleted status at the time of backup, so we don't need to retroactively
+ // update old executions
+ if (! $backup->disable_local_backup) {
$localBackupsToDelete = deleteOldBackupsLocally($backup);
if ($localBackupsToDelete->isNotEmpty()) {
$backup->executions()
@@ -261,18 +260,18 @@ function removeOldBackups($backup): void
}
}
- // Delete executions where both local and S3 storage are marked as deleted
- // or where only S3 is enabled and S3 storage is deleted
- if ($backup->disable_local_backup && $backup->save_s3) {
- $backup->executions()
- ->where('s3_storage_deleted', true)
- ->delete();
- } else {
- $backup->executions()
- ->where('local_storage_deleted', true)
- ->where('s3_storage_deleted', true)
- ->delete();
- }
+ // Delete execution records where all backup copies are gone
+ // Case 1: Both local and S3 backups are deleted
+ $backup->executions()
+ ->where('local_storage_deleted', true)
+ ->where('s3_storage_deleted', true)
+ ->delete();
+
+ // Case 2: Local backup is deleted and S3 was never used (s3_uploaded is null)
+ $backup->executions()
+ ->where('local_storage_deleted', true)
+ ->whereNull('s3_uploaded')
+ ->delete();
} catch (\Exception $e) {
throw $e;
diff --git a/bootstrap/helpers/docker.php b/bootstrap/helpers/docker.php
index 1491e4712..b63c3fc3b 100644
--- a/bootstrap/helpers/docker.php
+++ b/bootstrap/helpers/docker.php
@@ -1119,3 +1119,53 @@ function escapeDollarSign($value)
return str_replace($search, $replace, $value);
}
+
+/**
+ * Generate Docker build arguments from environment variables collection
+ * Returns only keys (no values) since values are sourced from environment via export
+ *
+ * @param  \Illuminate\Support\Collection|array  $variables  Collection of variables; only each entry's 'key' is read (values are sourced from the exported environment)
+ * @return \Illuminate\Support\Collection Collection of formatted --build-arg strings (keys only)
+ */
+function generateDockerBuildArgs($variables): \Illuminate\Support\Collection
+{
+ $variables = collect($variables);
+
+ return $variables->map(function ($var) {
+ $key = is_array($var) ? data_get($var, 'key') : $var->key;
+
+ // Only return the key - Docker will get the value from the environment
+ return "--build-arg {$key}";
+ });
+}
+
+/**
+ * Generate Docker environment flags from environment variables collection
+ *
+ * @param \Illuminate\Support\Collection|array $variables Collection of variables with 'key', 'value', and optionally 'is_multiline'
+ * @return string Space-separated environment flags
+ */
+function generateDockerEnvFlags($variables): string
+{
+ $variables = collect($variables);
+
+ return $variables
+ ->map(function ($var) {
+ $key = is_array($var) ? data_get($var, 'key') : $var->key;
+ $value = is_array($var) ? data_get($var, 'value') : $var->value;
+ $isMultiline = is_array($var) ? data_get($var, 'is_multiline', false) : ($var->is_multiline ?? false);
+
+ if ($isMultiline) {
+ // For multiline variables, strip surrounding quotes and escape for bash
+ $raw_value = trim($value, "'");
+ $escaped_value = str_replace(['\\', '"', '$', '`'], ['\\\\', '\\"', '\\$', '\\`'], $raw_value);
+
+ return "-e {$key}=\"{$escaped_value}\"";
+ }
+
+ $escaped_value = escapeshellarg($value);
+
+ return "-e {$key}={$escaped_value}";
+ })
+ ->implode(' ');
+}
diff --git a/bootstrap/helpers/github.php b/bootstrap/helpers/github.php
index 0de2f2fd9..3b5f183fb 100644
--- a/bootstrap/helpers/github.php
+++ b/bootstrap/helpers/github.php
@@ -135,7 +135,13 @@ function getPermissionsPath(GithubApp $source)
function loadRepositoryByPage(GithubApp $source, string $token, int $page)
{
- $response = Http::withToken($token)->get("{$source->api_url}/installation/repositories?per_page=100&page={$page}");
+ $response = Http::GitHub($source->api_url, $token)
+ ->timeout(20)
+ ->retry(3, 200, throw: false)
+ ->get('/installation/repositories', [
+ 'per_page' => 100,
+ 'page' => $page,
+ ]);
$json = $response->json();
if ($response->status() !== 200) {
return [
diff --git a/bootstrap/helpers/parsers.php b/bootstrap/helpers/parsers.php
index d4701d251..a588ed882 100644
--- a/bootstrap/helpers/parsers.php
+++ b/bootstrap/helpers/parsers.php
@@ -385,21 +385,34 @@ function applicationParser(Application $resource, int $pull_request_id = 0, ?int
'is_preview' => false,
]);
if ($resource->build_pack === 'dockercompose') {
- $domains = collect(json_decode(data_get($resource, 'docker_compose_domains'))) ?? collect([]);
- $domainExists = data_get($domains->get($fqdnFor), 'domain');
- $envExists = $resource->environment_variables()->where('key', $key->value())->first();
- if (str($domainExists)->replace('http://', '')->replace('https://', '')->value() !== $envExists->value) {
- $envExists->update([
- 'value' => $url,
- ]);
+ // Check if a service with this name actually exists
+ $serviceExists = false;
+ foreach ($services as $serviceName => $service) {
+ $transformedServiceName = str($serviceName)->replace('-', '_')->replace('.', '_')->value();
+ if ($transformedServiceName === $fqdnFor) {
+ $serviceExists = true;
+ break;
+ }
}
- if (is_null($domainExists)) {
- // Put URL in the domains array instead of FQDN
- $domains->put((string) $fqdnFor, [
- 'domain' => $url,
- ]);
- $resource->docker_compose_domains = $domains->toJson();
- $resource->save();
+
+ // Only add domain if the service exists
+ if ($serviceExists) {
+ $domains = collect(json_decode(data_get($resource, 'docker_compose_domains'))) ?? collect([]);
+ $domainExists = data_get($domains->get($fqdnFor), 'domain');
+ $envExists = $resource->environment_variables()->where('key', $key->value())->first();
+ if (str($domainExists)->replace('http://', '')->replace('https://', '')->value() !== $envExists->value) {
+ $envExists->update([
+ 'value' => $url,
+ ]);
+ }
+ if (is_null($domainExists)) {
+ // Put URL in the domains array instead of FQDN
+ $domains->put((string) $fqdnFor, [
+ 'domain' => $url,
+ ]);
+ $resource->docker_compose_domains = $domains->toJson();
+ $resource->save();
+ }
}
}
} elseif ($command->value() === 'URL') {
@@ -418,20 +431,33 @@ function applicationParser(Application $resource, int $pull_request_id = 0, ?int
'is_preview' => false,
]);
if ($resource->build_pack === 'dockercompose') {
- $domains = collect(json_decode(data_get($resource, 'docker_compose_domains'))) ?? collect([]);
- $domainExists = data_get($domains->get($urlFor), 'domain');
- $envExists = $resource->environment_variables()->where('key', $key->value())->first();
- if ($domainExists !== $envExists->value) {
- $envExists->update([
- 'value' => $url,
- ]);
+ // Check if a service with this name actually exists
+ $serviceExists = false;
+ foreach ($services as $serviceName => $service) {
+ $transformedServiceName = str($serviceName)->replace('-', '_')->replace('.', '_')->value();
+ if ($transformedServiceName === $urlFor) {
+ $serviceExists = true;
+ break;
+ }
}
- if (is_null($domainExists)) {
- $domains->put((string) $urlFor, [
- 'domain' => $url,
- ]);
- $resource->docker_compose_domains = $domains->toJson();
- $resource->save();
+
+ // Only add domain if the service exists
+ if ($serviceExists) {
+ $domains = collect(json_decode(data_get($resource, 'docker_compose_domains'))) ?? collect([]);
+ $domainExists = data_get($domains->get($urlFor), 'domain');
+ $envExists = $resource->environment_variables()->where('key', $key->value())->first();
+ if ($domainExists !== $envExists->value) {
+ $envExists->update([
+ 'value' => $url,
+ ]);
+ }
+ if (is_null($domainExists)) {
+ $domains->put((string) $urlFor, [
+ 'domain' => $url,
+ ]);
+ $resource->docker_compose_domains = $domains->toJson();
+ $resource->save();
+ }
}
}
} else {
@@ -910,7 +936,7 @@ function applicationParser(Application $resource, int $pull_request_id = 0, ?int
$preview = $resource->previews()->find($preview_id);
$docker_compose_domains = collect(json_decode(data_get($preview, 'docker_compose_domains')));
if ($docker_compose_domains->count() > 0) {
- $found_fqdn = data_get($docker_compose_domains, "$serviceName.domain");
+ $found_fqdn = data_get($docker_compose_domains, "$changedServiceName.domain");
if ($found_fqdn) {
$fqdns = collect($found_fqdn);
} else {
@@ -1146,6 +1172,9 @@ function serviceParser(Service $resource): Collection
$parsedServices = collect([]);
+ // Generate SERVICE_NAME variables for docker compose services
+ $serviceNameEnvironments = generateDockerComposeServiceName($services);
+
$allMagicEnvironments = collect([]);
// Presave services
foreach ($services as $serviceName => $service) {
@@ -1962,7 +1991,7 @@ function serviceParser(Service $resource): Collection
$payload['volumes'] = $volumesParsed;
}
if ($environment->count() > 0 || $coolifyEnvironments->count() > 0) {
- $payload['environment'] = $environment->merge($coolifyEnvironments);
+ $payload['environment'] = $environment->merge($coolifyEnvironments)->merge($serviceNameEnvironments);
}
if ($logging) {
$payload['logging'] = $logging;
diff --git a/bootstrap/helpers/proxy.php b/bootstrap/helpers/proxy.php
index 5bc1d005e..924bad307 100644
--- a/bootstrap/helpers/proxy.php
+++ b/bootstrap/helpers/proxy.php
@@ -108,7 +108,63 @@ function connectProxyToNetworks(Server $server)
return $commands->flatten();
}
-function generate_default_proxy_configuration(Server $server)
+function extractCustomProxyCommands(Server $server, string $existing_config): array
+{
+ $custom_commands = [];
+ $proxy_type = $server->proxyType();
+
+ if ($proxy_type !== ProxyTypes::TRAEFIK->value || empty($existing_config)) {
+ return $custom_commands;
+ }
+
+ try {
+ $yaml = Yaml::parse($existing_config);
+ $existing_commands = data_get($yaml, 'services.traefik.command', []);
+
+ if (empty($existing_commands)) {
+ return $custom_commands;
+ }
+
+ // Define default commands that Coolify generates
+ $default_command_prefixes = [
+ '--ping=',
+ '--api.',
+ '--entrypoints.http.address=',
+ '--entrypoints.https.address=',
+ '--entrypoints.http.http.encodequerysemicolons=',
+ '--entryPoints.http.http2.maxConcurrentStreams=',
+ '--entrypoints.https.http.encodequerysemicolons=',
+ '--entryPoints.https.http2.maxConcurrentStreams=',
+ '--entrypoints.https.http3',
+ '--providers.file.',
+ '--certificatesresolvers.',
+ '--providers.docker',
+ '--providers.swarm',
+ '--log.level=',
+ '--accesslog.',
+ ];
+
+ // Extract commands that don't match default prefixes (these are custom)
+ foreach ($existing_commands as $command) {
+ $is_default = false;
+ foreach ($default_command_prefixes as $prefix) {
+ if (str_starts_with($command, $prefix)) {
+ $is_default = true;
+ break;
+ }
+ }
+ if (! $is_default) {
+ $custom_commands[] = $command;
+ }
+ }
+ } catch (\Exception $e) {
+ // If we can't parse the config, return empty array
+ // Silently fail to avoid breaking the proxy regeneration
+ }
+
+ return $custom_commands;
+}
+function generateDefaultProxyConfiguration(Server $server, array $custom_commands = [])
{
$proxy_path = $server->proxyPath();
$proxy_type = $server->proxyType();
@@ -228,6 +284,13 @@ function generate_default_proxy_configuration(Server $server)
$config['services']['traefik']['command'][] = '--providers.docker=true';
$config['services']['traefik']['command'][] = '--providers.docker.exposedbydefault=false';
}
+
+ // Append custom commands (e.g., trustedIPs for Cloudflare)
+ if (! empty($custom_commands)) {
+ foreach ($custom_commands as $custom_command) {
+ $config['services']['traefik']['command'][] = $custom_command;
+ }
+ }
} elseif ($proxy_type === 'CADDY') {
$config = [
'networks' => $array_of_networks->toArray(),
diff --git a/bootstrap/helpers/remoteProcess.php b/bootstrap/helpers/remoteProcess.php
index 56386a55f..3218bf878 100644
--- a/bootstrap/helpers/remoteProcess.php
+++ b/bootstrap/helpers/remoteProcess.php
@@ -84,64 +84,6 @@ function () use ($source, $dest, $server) {
);
}
-function transfer_file_to_container(string $content, string $container_path, string $deployment_uuid, Server $server, bool $throwError = true): ?string
-{
- $temp_file = tempnam(sys_get_temp_dir(), 'coolify_env_');
-
- try {
- // Write content to temporary file
- file_put_contents($temp_file, $content);
-
- // Generate unique filename for server transfer
- $server_temp_file = '/tmp/coolify_env_'.uniqid().'_'.$deployment_uuid;
-
- // Transfer file to server
- instant_scp($temp_file, $server_temp_file, $server, $throwError);
-
- // Ensure parent directory exists in container, then copy file
- $parent_dir = dirname($container_path);
- $commands = [];
- if ($parent_dir !== '.' && $parent_dir !== '/') {
- $commands[] = executeInDocker($deployment_uuid, "mkdir -p \"$parent_dir\"");
- }
- $commands[] = "docker cp $server_temp_file $deployment_uuid:$container_path";
- $commands[] = "rm -f $server_temp_file"; // Cleanup server temp file
-
- return instant_remote_process_with_timeout($commands, $server, $throwError);
-
- } finally {
- // Always cleanup local temp file
- if (file_exists($temp_file)) {
- unlink($temp_file);
- }
- }
-}
-
-function transfer_file_to_server(string $content, string $server_path, Server $server, bool $throwError = true): ?string
-{
- $temp_file = tempnam(sys_get_temp_dir(), 'coolify_env_');
-
- try {
- // Write content to temporary file
- file_put_contents($temp_file, $content);
-
- // Ensure parent directory exists on server
- $parent_dir = dirname($server_path);
- if ($parent_dir !== '.' && $parent_dir !== '/') {
- instant_remote_process_with_timeout(["mkdir -p \"$parent_dir\""], $server, $throwError);
- }
-
- // Transfer file directly to server destination
- return instant_scp($temp_file, $server_path, $server, $throwError);
-
- } finally {
- // Always cleanup local temp file
- if (file_exists($temp_file)) {
- unlink($temp_file);
- }
- }
-}
-
function instant_remote_process_with_timeout(Collection|array $command, Server $server, bool $throwError = true, bool $no_sudo = false): ?string
{
$command = $command instanceof Collection ? $command->toArray() : $command;
diff --git a/bootstrap/helpers/shared.php b/bootstrap/helpers/shared.php
index a0ab5a704..656c607bf 100644
--- a/bootstrap/helpers/shared.php
+++ b/bootstrap/helpers/shared.php
@@ -634,10 +634,14 @@ function getTopLevelNetworks(Service|Application $resource)
$definedNetwork = collect([$resource->uuid]);
$services = collect($services)->map(function ($service, $_) use ($topLevelNetworks, $definedNetwork) {
$serviceNetworks = collect(data_get($service, 'networks', []));
- $hasHostNetworkMode = data_get($service, 'network_mode') === 'host' ? true : false;
+ $networkMode = data_get($service, 'network_mode');
- // Only add 'networks' key if 'network_mode' is not 'host'
- if (! $hasHostNetworkMode) {
+ $hasValidNetworkMode =
+ $networkMode === 'host' ||
+ (is_string($networkMode) && (str_starts_with($networkMode, 'service:') || str_starts_with($networkMode, 'container:')));
+
+        // Only add 'networks' key if 'network_mode' is not 'host' and does not start with 'service:' or 'container:'
+ if (! $hasValidNetworkMode) {
// Collect/create/update networks
if ($serviceNetworks->count() > 0) {
foreach ($serviceNetworks as $networkName => $networkDetails) {
@@ -1272,7 +1276,12 @@ function parseDockerComposeFile(Service|Application $resource, bool $isNew = fal
$serviceNetworks = collect(data_get($service, 'networks', []));
$serviceVariables = collect(data_get($service, 'environment', []));
$serviceLabels = collect(data_get($service, 'labels', []));
- $hasHostNetworkMode = data_get($service, 'network_mode') === 'host' ? true : false;
+ $networkMode = data_get($service, 'network_mode');
+
+ $hasValidNetworkMode =
+ $networkMode === 'host' ||
+ (is_string($networkMode) && (str_starts_with($networkMode, 'service:') || str_starts_with($networkMode, 'container:')));
+
if ($serviceLabels->count() > 0) {
$removedLabels = collect([]);
$serviceLabels = $serviceLabels->filter(function ($serviceLabel, $serviceLabelName) use ($removedLabels) {
@@ -1383,7 +1392,7 @@ function parseDockerComposeFile(Service|Application $resource, bool $isNew = fal
$savedService->ports = $collectedPorts->implode(',');
$savedService->save();
- if (! $hasHostNetworkMode) {
+ if (! $hasValidNetworkMode) {
// Add Coolify specific networks
$definedNetworkExists = $topLevelNetworks->contains(function ($value, $_) use ($definedNetwork) {
return $value == $definedNetwork;
diff --git a/bootstrap/helpers/socialite.php b/bootstrap/helpers/socialite.php
index 961f6809b..fd3fbe74b 100644
--- a/bootstrap/helpers/socialite.php
+++ b/bootstrap/helpers/socialite.php
@@ -70,8 +70,14 @@ function get_socialite_provider(string $provider)
'infomaniak' => \SocialiteProviders\Infomaniak\Provider::class,
];
- return Socialite::buildProvider(
+ $socialite = Socialite::buildProvider(
$provider_class_map[$provider],
$config
);
+
+ if ($provider == 'gitlab' && ! empty($oauth_setting->base_url)) {
+ $socialite->setHost($oauth_setting->base_url);
+ }
+
+ return $socialite;
}
diff --git a/config/constants.php b/config/constants.php
index 224f2dfb5..01eaa7fa1 100644
--- a/config/constants.php
+++ b/config/constants.php
@@ -2,7 +2,7 @@
return [
'coolify' => [
- 'version' => '4.0.0-beta.429',
+ 'version' => '4.0.0-beta.435',
'helper_version' => '1.0.11',
'realtime_version' => '1.0.10',
'self_hosted' => env('SELF_HOSTED', true),
@@ -64,7 +64,7 @@
'mux_max_age' => env('SSH_MUX_MAX_AGE', 1800), // 30 minutes
'connection_timeout' => 10,
'server_interval' => 20,
- 'command_timeout' => 7200,
+ 'command_timeout' => 3600,
'max_retries' => env('SSH_MAX_RETRIES', 3),
'retry_base_delay' => env('SSH_RETRY_BASE_DELAY', 2), // seconds
'retry_max_delay' => env('SSH_RETRY_MAX_DELAY', 30), // seconds
diff --git a/database/factories/TeamFactory.php b/database/factories/TeamFactory.php
new file mode 100644
index 000000000..26748c54e
--- /dev/null
+++ b/database/factories/TeamFactory.php
@@ -0,0 +1,40 @@
+
+ */
+class TeamFactory extends Factory
+{
+ protected $model = Team::class;
+
+ /**
+ * Define the model's default state.
+ *
+ * @return array
+ */
+ public function definition(): array
+ {
+ return [
+ 'name' => $this->faker->company().' Team',
+ 'description' => $this->faker->sentence(),
+ 'personal_team' => false,
+ 'show_boarding' => false,
+ ];
+ }
+
+ /**
+ * Indicate that the team is a personal team.
+ */
+ public function personal(): static
+ {
+ return $this->state(fn (array $attributes) => [
+ 'personal_team' => true,
+ 'name' => $this->faker->firstName()."'s Team",
+ ]);
+ }
+}
diff --git a/database/migrations/2025_10_03_154100_update_clickhouse_image.php b/database/migrations/2025_10_03_154100_update_clickhouse_image.php
new file mode 100644
index 000000000..e52bbcc16
--- /dev/null
+++ b/database/migrations/2025_10_03_154100_update_clickhouse_image.php
@@ -0,0 +1,32 @@
+string('image')->default('bitnamilegacy/clickhouse')->change();
+ });
+ // Optionally, update any existing rows with the old default to the new one
+ DB::table('standalone_clickhouses')
+ ->where('image', 'bitnami/clickhouse')
+ ->update(['image' => 'bitnamilegacy/clickhouse']);
+ }
+
+ public function down()
+ {
+ Schema::table('standalone_clickhouses', function (Blueprint $table) {
+ $table->string('image')->default('bitnami/clickhouse')->change();
+ });
+ // Optionally, revert any changed values
+ DB::table('standalone_clickhouses')
+ ->where('image', 'bitnamilegacy/clickhouse')
+ ->update(['image' => 'bitnami/clickhouse']);
+ }
+};
\ No newline at end of file
diff --git a/database/migrations/2025_10_07_120723_add_s3_uploaded_to_scheduled_database_backup_executions_table.php b/database/migrations/2025_10_07_120723_add_s3_uploaded_to_scheduled_database_backup_executions_table.php
new file mode 100644
index 000000000..d80f2621b
--- /dev/null
+++ b/database/migrations/2025_10_07_120723_add_s3_uploaded_to_scheduled_database_backup_executions_table.php
@@ -0,0 +1,28 @@
+boolean('s3_uploaded')->nullable()->after('filename');
+ });
+ }
+
+ /**
+ * Reverse the migrations.
+ */
+ public function down(): void
+ {
+ Schema::table('scheduled_database_backup_executions', function (Blueprint $table) {
+ $table->dropColumn('s3_uploaded');
+ });
+ }
+};
diff --git a/database/seeders/DatabaseSeeder.php b/database/seeders/DatabaseSeeder.php
index e0e7a3ba5..57ccab4ae 100644
--- a/database/seeders/DatabaseSeeder.php
+++ b/database/seeders/DatabaseSeeder.php
@@ -29,6 +29,7 @@ public function run(): void
DisableTwoStepConfirmationSeeder::class,
SentinelSeeder::class,
CaSslCertSeeder::class,
+ PersonalAccessTokenSeeder::class,
]);
}
}
diff --git a/database/seeders/PersonalAccessTokenSeeder.php b/database/seeders/PersonalAccessTokenSeeder.php
new file mode 100644
index 000000000..38a45219c
--- /dev/null
+++ b/database/seeders/PersonalAccessTokenSeeder.php
@@ -0,0 +1,115 @@
+environment('production')) {
+ $this->command->warn('Skipping PersonalAccessTokenSeeder in production environment');
+
+ return;
+ }
+
+ // Get the first user (usually the admin user created during setup)
+ $user = User::find(0);
+
+ if (! $user) {
+ $this->command->warn('No user found. Please run UserSeeder first.');
+
+ return;
+ }
+
+ // Get the user's first team
+ $team = $user->teams()->first();
+
+ if (! $team) {
+ $this->command->warn('No team found for user. Cannot create API tokens.');
+
+ return;
+ }
+
+ // Define test tokens with different scopes
+ $testTokens = [
+ [
+ 'name' => 'Development Root Token',
+ 'token' => 'root',
+ 'abilities' => ['root'],
+ ],
+ [
+ 'name' => 'Development Read Token',
+ 'token' => 'read',
+ 'abilities' => ['read'],
+ ],
+ [
+ 'name' => 'Development Read Sensitive Token',
+ 'token' => 'read-sensitive',
+ 'abilities' => ['read', 'read:sensitive'],
+ ],
+ [
+ 'name' => 'Development Write Token',
+ 'token' => 'write',
+ 'abilities' => ['write'],
+ ],
+ [
+ 'name' => 'Development Write Sensitive Token',
+ 'token' => 'write-sensitive',
+ 'abilities' => ['write', 'write:sensitive'],
+ ],
+ [
+ 'name' => 'Development Deploy Token',
+ 'token' => 'deploy',
+ 'abilities' => ['deploy'],
+ ],
+ ];
+
+ // First, remove all existing development tokens for this user
+ $deletedCount = PersonalAccessToken::where('tokenable_id', $user->id)
+ ->where('tokenable_type', get_class($user))
+ ->whereIn('name', array_column($testTokens, 'name'))
+ ->delete();
+
+ if ($deletedCount > 0) {
+ $this->command->info("Removed {$deletedCount} existing development token(s).");
+ }
+
+ // Now create fresh tokens
+ foreach ($testTokens as $tokenData) {
+ // Create the token with a simple format: Bearer {scope}
+ // The token format in the database is the hash of the plain text token
+ $plainTextToken = $tokenData['token'];
+
+ PersonalAccessToken::create([
+ 'tokenable_type' => get_class($user),
+ 'tokenable_id' => $user->id,
+ 'name' => $tokenData['name'],
+ 'token' => hash('sha256', $plainTextToken),
+ 'abilities' => $tokenData['abilities'],
+ 'team_id' => $team->id,
+ ]);
+
+ $this->command->info("Created token '{$tokenData['name']}' with Bearer token: {$plainTextToken}");
+ }
+
+ $this->command->info('');
+ $this->command->info('Test API tokens created successfully!');
+ $this->command->info('You can use these tokens in development as:');
+ $this->command->info(' Bearer root - Root access');
+ $this->command->info(' Bearer read - Read only access');
+ $this->command->info(' Bearer read-sensitive - Read with sensitive data access');
+ $this->command->info(' Bearer write - Write access');
+ $this->command->info(' Bearer write-sensitive - Write with sensitive data access');
+ $this->command->info(' Bearer deploy - Deploy access');
+ }
+}
diff --git a/docker/production/Dockerfile b/docker/production/Dockerfile
index 6c9628a81..628fb5054 100644
--- a/docker/production/Dockerfile
+++ b/docker/production/Dockerfile
@@ -72,6 +72,7 @@ RUN apk add --no-cache gnupg && \
curl -fSsL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor > /usr/share/keyrings/postgresql.gpg
# Install system dependencies
+RUN apk upgrade
RUN apk add --no-cache \
postgresql${POSTGRES_VERSION}-client \
openssh-client \
diff --git a/openapi.json b/openapi.json
index 2b0a81c6e..901741dd0 100644
--- a/openapi.json
+++ b/openapi.json
@@ -3309,6 +3309,55 @@
]
}
},
+ "\/databases\/{uuid}\/backups": {
+ "get": {
+ "tags": [
+ "Databases"
+ ],
+ "summary": "Get",
+ "description": "Get backups details by database UUID.",
+ "operationId": "get-database-backups-by-uuid",
+ "parameters": [
+ {
+ "name": "uuid",
+ "in": "path",
+ "description": "UUID of the database.",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "uuid"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Get all backups for a database",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "type": "string"
+ },
+ "example": "Content is very complex. Will be implemented later."
+ }
+ }
+ },
+ "401": {
+ "$ref": "#\/components\/responses\/401"
+ },
+ "400": {
+ "$ref": "#\/components\/responses\/400"
+ },
+ "404": {
+ "$ref": "#\/components\/responses\/404"
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+ }
+ },
"\/databases\/{uuid}": {
"get": {
"tags": [
@@ -3658,6 +3707,200 @@
]
}
},
+ "\/databases\/{uuid}\/backups\/{scheduled_backup_uuid}": {
+ "delete": {
+ "tags": [
+ "Databases"
+ ],
+ "summary": "Delete backup configuration",
+ "description": "Deletes a backup configuration and all its executions.",
+ "operationId": "delete-backup-configuration-by-uuid",
+ "parameters": [
+ {
+ "name": "uuid",
+ "in": "path",
+ "description": "UUID of the database",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "scheduled_backup_uuid",
+ "in": "path",
+ "description": "UUID of the backup configuration to delete",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "uuid"
+ }
+ },
+ {
+ "name": "delete_s3",
+ "in": "query",
+ "description": "Whether to delete all backup files from S3",
+ "required": false,
+ "schema": {
+ "type": "boolean",
+ "default": false
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Backup configuration deleted.",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "": {
+ "type": "string",
+ "example": "Backup configuration and all executions deleted."
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "404": {
+ "description": "Backup configuration not found.",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "": {
+ "type": "string",
+ "example": "Backup configuration not found."
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+ },
+ "patch": {
+ "tags": [
+ "Databases"
+ ],
+ "summary": "Update",
+ "description": "Update a specific backup configuration for a given database, identified by its UUID and the backup ID",
+ "operationId": "update-database-backup",
+ "parameters": [
+ {
+ "name": "uuid",
+ "in": "path",
+ "description": "UUID of the database.",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "uuid"
+ }
+ },
+ {
+ "name": "scheduled_backup_uuid",
+ "in": "path",
+ "description": "UUID of the backup configuration.",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "uuid"
+ }
+ }
+ ],
+ "requestBody": {
+ "description": "Database backup configuration data",
+ "required": true,
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "save_s3": {
+ "type": "boolean",
+ "description": "Whether data is saved in s3 or not"
+ },
+ "s3_storage_uuid": {
+ "type": "string",
+ "description": "S3 storage UUID"
+ },
+ "backup_now": {
+ "type": "boolean",
+ "description": "Whether to take a backup now or not"
+ },
+ "enabled": {
+ "type": "boolean",
+ "description": "Whether the backup is enabled or not"
+ },
+ "databases_to_backup": {
+ "type": "string",
+ "description": "Comma separated list of databases to backup"
+ },
+ "dump_all": {
+ "type": "boolean",
+ "description": "Whether all databases are dumped or not"
+ },
+ "frequency": {
+ "type": "string",
+ "description": "Frequency of the backup"
+ },
+ "database_backup_retention_amount_locally": {
+ "type": "integer",
+ "description": "Retention amount of the backup locally"
+ },
+ "database_backup_retention_days_locally": {
+ "type": "integer",
+ "description": "Retention days of the backup locally"
+ },
+ "database_backup_retention_max_storage_locally": {
+ "type": "integer",
+ "description": "Max storage of the backup locally"
+ },
+ "database_backup_retention_amount_s3": {
+ "type": "integer",
+ "description": "Retention amount of the backup in s3"
+ },
+ "database_backup_retention_days_s3": {
+ "type": "integer",
+ "description": "Retention days of the backup in s3"
+ },
+ "database_backup_retention_max_storage_s3": {
+ "type": "integer",
+ "description": "Max storage of the backup in S3"
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "Database backup configuration updated"
+ },
+ "401": {
+ "$ref": "#\/components\/responses\/401"
+ },
+ "400": {
+ "$ref": "#\/components\/responses\/400"
+ },
+ "404": {
+ "$ref": "#\/components\/responses\/404"
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+ }
+ },
"\/databases\/postgresql": {
"post": {
"tags": [
@@ -4694,6 +4937,175 @@
]
}
},
+ "\/databases\/{uuid}\/backups\/{scheduled_backup_uuid}\/executions\/{execution_uuid}": {
+ "delete": {
+ "tags": [
+ "Databases"
+ ],
+ "summary": "Delete backup execution",
+ "description": "Deletes a specific backup execution.",
+ "operationId": "delete-backup-execution-by-uuid",
+ "parameters": [
+ {
+ "name": "uuid",
+ "in": "path",
+ "description": "UUID of the database",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "scheduled_backup_uuid",
+ "in": "path",
+ "description": "UUID of the backup configuration",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "uuid"
+ }
+ },
+ {
+ "name": "execution_uuid",
+ "in": "path",
+ "description": "UUID of the backup execution to delete",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "uuid"
+ }
+ },
+ {
+ "name": "delete_s3",
+ "in": "query",
+ "description": "Whether to delete the backup from S3",
+ "required": false,
+ "schema": {
+ "type": "boolean",
+ "default": false
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Backup execution deleted.",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "": {
+ "type": "string",
+ "example": "Backup execution deleted."
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "404": {
+ "description": "Backup execution not found.",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "": {
+ "type": "string",
+ "example": "Backup execution not found."
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+ }
+ },
+ "\/databases\/{uuid}\/backups\/{scheduled_backup_uuid}\/executions": {
+ "get": {
+ "tags": [
+ "Databases"
+ ],
+ "summary": "List backup executions",
+ "description": "Get all executions for a specific backup configuration.",
+ "operationId": "list-backup-executions",
+ "parameters": [
+ {
+ "name": "uuid",
+ "in": "path",
+ "description": "UUID of the database",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "scheduled_backup_uuid",
+ "in": "path",
+ "description": "UUID of the backup configuration",
+ "required": true,
+ "schema": {
+ "type": "string",
+ "format": "uuid"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "List of backup executions",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "": {
+ "type": "array",
+ "items": {
+ "properties": {
+ "uuid": {
+ "type": "string"
+ },
+ "filename": {
+ "type": "string"
+ },
+ "size": {
+ "type": "integer"
+ },
+ "created_at": {
+ "type": "string"
+ },
+ "message": {
+ "type": "string"
+ },
+ "status": {
+ "type": "string"
+ }
+ },
+ "type": "object"
+ }
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "404": {
+ "description": "Backup configuration not found."
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+ }
+ },
"\/databases\/{uuid}\/start": {
"get": {
"tags": [
@@ -5095,6 +5507,477 @@
]
}
},
+ "\/github-apps": {
+ "post": {
+ "tags": [
+ "GitHub Apps"
+ ],
+ "summary": "Create GitHub App",
+ "description": "Create a new GitHub app.",
+ "operationId": "create-github-app",
+ "requestBody": {
+ "description": "GitHub app creation payload.",
+ "required": true,
+ "content": {
+ "application\/json": {
+ "schema": {
+ "required": [
+ "name",
+ "api_url",
+ "html_url",
+ "app_id",
+ "installation_id",
+ "client_id",
+ "client_secret",
+ "private_key_uuid"
+ ],
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Name of the GitHub app."
+ },
+ "organization": {
+ "type": "string",
+ "nullable": true,
+ "description": "Organization to associate the app with."
+ },
+ "api_url": {
+ "type": "string",
+ "description": "API URL for the GitHub app (e.g., https:\/\/api.github.com)."
+ },
+ "html_url": {
+ "type": "string",
+ "description": "HTML URL for the GitHub app (e.g., https:\/\/github.com)."
+ },
+ "custom_user": {
+ "type": "string",
+ "description": "Custom user for SSH access (default: git)."
+ },
+ "custom_port": {
+ "type": "integer",
+ "description": "Custom port for SSH access (default: 22)."
+ },
+ "app_id": {
+ "type": "integer",
+ "description": "GitHub App ID from GitHub."
+ },
+ "installation_id": {
+ "type": "integer",
+ "description": "GitHub Installation ID."
+ },
+ "client_id": {
+ "type": "string",
+ "description": "GitHub OAuth App Client ID."
+ },
+ "client_secret": {
+ "type": "string",
+ "description": "GitHub OAuth App Client Secret."
+ },
+ "webhook_secret": {
+ "type": "string",
+ "description": "Webhook secret for GitHub webhooks."
+ },
+ "private_key_uuid": {
+ "type": "string",
+ "description": "UUID of an existing private key for GitHub App authentication."
+ },
+ "is_system_wide": {
+ "type": "boolean",
+ "description": "Is this app system-wide (cloud only)."
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "responses": {
+ "201": {
+ "description": "GitHub app created successfully.",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "id": {
+ "type": "integer"
+ },
+ "uuid": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "organization": {
+ "type": "string",
+ "nullable": true
+ },
+ "api_url": {
+ "type": "string"
+ },
+ "html_url": {
+ "type": "string"
+ },
+ "custom_user": {
+ "type": "string"
+ },
+ "custom_port": {
+ "type": "integer"
+ },
+ "app_id": {
+ "type": "integer"
+ },
+ "installation_id": {
+ "type": "integer"
+ },
+ "client_id": {
+ "type": "string"
+ },
+ "private_key_id": {
+ "type": "integer"
+ },
+ "is_system_wide": {
+ "type": "boolean"
+ },
+ "team_id": {
+ "type": "integer"
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#\/components\/responses\/400"
+ },
+ "401": {
+ "$ref": "#\/components\/responses\/401"
+ },
+ "422": {
+ "$ref": "#\/components\/responses\/422"
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+ }
+ },
+ "\/github-apps\/{github_app_id}\/repositories": {
+ "get": {
+ "tags": [
+ "GitHub Apps"
+ ],
+ "summary": "Load Repositories for a GitHub App",
+ "description": "Fetch repositories from GitHub for a given GitHub app.",
+ "operationId": "load-repositories",
+ "parameters": [
+ {
+ "name": "github_app_id",
+ "in": "path",
+ "description": "GitHub App ID",
+ "required": true,
+ "schema": {
+ "type": "integer"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Repositories loaded successfully.",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "": {
+ "type": "array",
+ "items": {
+ "type": "object"
+ }
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#\/components\/responses\/400"
+ },
+ "401": {
+ "$ref": "#\/components\/responses\/401"
+ },
+ "404": {
+ "$ref": "#\/components\/responses\/404"
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+ }
+ },
+ "\/github-apps\/{github_app_id}\/repositories\/{owner}\/{repo}\/branches": {
+ "get": {
+ "tags": [
+ "GitHub Apps"
+ ],
+ "summary": "Load Branches for a GitHub Repository",
+ "description": "Fetch branches from GitHub for a given repository.",
+ "operationId": "load-branches",
+ "parameters": [
+ {
+ "name": "github_app_id",
+ "in": "path",
+ "description": "GitHub App ID",
+ "required": true,
+ "schema": {
+ "type": "integer"
+ }
+ },
+ {
+ "name": "owner",
+ "in": "path",
+ "description": "Repository owner",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "repo",
+ "in": "path",
+ "description": "Repository name",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Branches loaded successfully.",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "": {
+ "type": "array",
+ "items": {
+ "type": "object"
+ }
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#\/components\/responses\/400"
+ },
+ "401": {
+ "$ref": "#\/components\/responses\/401"
+ },
+ "404": {
+ "$ref": "#\/components\/responses\/404"
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+ }
+ },
+ "\/github-apps\/{github_app_id}": {
+ "delete": {
+ "tags": [
+ "GitHub Apps"
+ ],
+ "summary": "Delete GitHub App",
+ "description": "Delete a GitHub app if it's not being used by any applications.",
+ "operationId": "deleteGithubApp",
+ "parameters": [
+ {
+ "name": "github_app_id",
+ "in": "path",
+ "description": "GitHub App ID",
+ "required": true,
+ "schema": {
+ "type": "integer"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "GitHub app deleted successfully",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "message": {
+ "type": "string",
+ "example": "GitHub app deleted successfully"
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "401": {
+ "description": "Unauthorized"
+ },
+ "404": {
+ "description": "GitHub app not found"
+ },
+ "409": {
+ "description": "Conflict - GitHub app is in use",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "message": {
+ "type": "string",
+ "example": "This GitHub app is being used by 5 application(s). Please delete all applications first."
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+ },
+ "patch": {
+ "tags": [
+ "GitHub Apps"
+ ],
+ "summary": "Update GitHub App",
+ "description": "Update an existing GitHub app.",
+ "operationId": "updateGithubApp",
+ "parameters": [
+ {
+ "name": "github_app_id",
+ "in": "path",
+ "description": "GitHub App ID",
+ "required": true,
+ "schema": {
+ "type": "integer"
+ }
+ }
+ ],
+ "requestBody": {
+ "required": true,
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "GitHub App name"
+ },
+ "organization": {
+ "type": "string",
+ "nullable": true,
+ "description": "GitHub organization"
+ },
+ "api_url": {
+ "type": "string",
+ "description": "GitHub API URL"
+ },
+ "html_url": {
+ "type": "string",
+ "description": "GitHub HTML URL"
+ },
+ "custom_user": {
+ "type": "string",
+ "description": "Custom user for SSH"
+ },
+ "custom_port": {
+ "type": "integer",
+ "description": "Custom port for SSH"
+ },
+ "app_id": {
+ "type": "integer",
+ "description": "GitHub App ID"
+ },
+ "installation_id": {
+ "type": "integer",
+ "description": "GitHub Installation ID"
+ },
+ "client_id": {
+ "type": "string",
+ "description": "GitHub Client ID"
+ },
+ "client_secret": {
+ "type": "string",
+ "description": "GitHub Client Secret"
+ },
+ "webhook_secret": {
+ "type": "string",
+ "description": "GitHub Webhook Secret"
+ },
+ "private_key_uuid": {
+ "type": "string",
+ "description": "Private key UUID"
+ },
+ "is_system_wide": {
+ "type": "boolean",
+ "description": "Is system wide (non-cloud instances only)"
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "GitHub app updated successfully",
+ "content": {
+ "application\/json": {
+ "schema": {
+ "properties": {
+ "message": {
+ "type": "string",
+ "example": "GitHub app updated successfully"
+ },
+ "data": {
+ "type": "object",
+ "description": "Updated GitHub app data"
+ }
+ },
+ "type": "object"
+ }
+ }
+ }
+ },
+ "401": {
+ "description": "Unauthorized"
+ },
+ "404": {
+ "description": "GitHub app not found"
+ },
+ "422": {
+ "description": "Validation error"
+ }
+ },
+ "security": [
+ {
+ "bearerAuth": []
+ }
+ ]
+ }
+ },
"\/version": {
"get": {
"summary": "Version",
@@ -8890,6 +9773,10 @@
"name": "Deployments",
"description": "Deployments"
},
+ {
+ "name": "GitHub Apps",
+ "description": "GitHub Apps"
+ },
{
"name": "Projects",
"description": "Projects"
diff --git a/openapi.yaml b/openapi.yaml
index 9529fcf87..3e39c5d36 100644
--- a/openapi.yaml
+++ b/openapi.yaml
@@ -2097,6 +2097,39 @@ paths:
security:
-
bearerAuth: []
+ '/databases/{uuid}/backups':
+ get:
+ tags:
+ - Databases
+ summary: Get
+      description: 'Get backup details by database UUID.'
+ operationId: get-database-backups-by-uuid
+ parameters:
+ -
+ name: uuid
+ in: path
+ description: 'UUID of the database.'
+ required: true
+ schema:
+ type: string
+ format: uuid
+ responses:
+ '200':
+ description: 'Get all backups for a database'
+ content:
+ application/json:
+ schema:
+ type: string
+ example: 'Content is very complex. Will be implemented later.'
+ '401':
+ $ref: '#/components/responses/401'
+ '400':
+ $ref: '#/components/responses/400'
+ '404':
+ $ref: '#/components/responses/404'
+ security:
+ -
+ bearerAuth: []
'/databases/{uuid}':
get:
tags:
@@ -2347,6 +2380,139 @@ paths:
security:
-
bearerAuth: []
+ '/databases/{uuid}/backups/{scheduled_backup_uuid}':
+ delete:
+ tags:
+ - Databases
+ summary: 'Delete backup configuration'
+ description: 'Deletes a backup configuration and all its executions.'
+ operationId: delete-backup-configuration-by-uuid
+ parameters:
+ -
+ name: uuid
+ in: path
+ description: 'UUID of the database'
+ required: true
+ schema:
+ type: string
+ -
+ name: scheduled_backup_uuid
+ in: path
+ description: 'UUID of the backup configuration to delete'
+ required: true
+ schema:
+ type: string
+ format: uuid
+ -
+ name: delete_s3
+ in: query
+ description: 'Whether to delete all backup files from S3'
+ required: false
+ schema:
+ type: boolean
+ default: false
+ responses:
+ '200':
+ description: 'Backup configuration deleted.'
+ content:
+ application/json:
+ schema:
+ properties:
+ '': { type: string, example: 'Backup configuration and all executions deleted.' }
+ type: object
+ '404':
+ description: 'Backup configuration not found.'
+ content:
+ application/json:
+ schema:
+ properties:
+ '': { type: string, example: 'Backup configuration not found.' }
+ type: object
+ security:
+ -
+ bearerAuth: []
+ patch:
+ tags:
+ - Databases
+ summary: Update
+ description: 'Update a specific backup configuration for a given database, identified by its UUID and the backup ID'
+ operationId: update-database-backup
+ parameters:
+ -
+ name: uuid
+ in: path
+ description: 'UUID of the database.'
+ required: true
+ schema:
+ type: string
+ format: uuid
+ -
+ name: scheduled_backup_uuid
+ in: path
+ description: 'UUID of the backup configuration.'
+ required: true
+ schema:
+ type: string
+ format: uuid
+ requestBody:
+ description: 'Database backup configuration data'
+ required: true
+ content:
+ application/json:
+ schema:
+ properties:
+ save_s3:
+ type: boolean
+ description: 'Whether data is saved in s3 or not'
+ s3_storage_uuid:
+ type: string
+ description: 'S3 storage UUID'
+ backup_now:
+ type: boolean
+ description: 'Whether to take a backup now or not'
+ enabled:
+ type: boolean
+ description: 'Whether the backup is enabled or not'
+ databases_to_backup:
+ type: string
+ description: 'Comma separated list of databases to backup'
+ dump_all:
+ type: boolean
+ description: 'Whether all databases are dumped or not'
+ frequency:
+ type: string
+ description: 'Frequency of the backup'
+ database_backup_retention_amount_locally:
+ type: integer
+ description: 'Retention amount of the backup locally'
+ database_backup_retention_days_locally:
+ type: integer
+ description: 'Retention days of the backup locally'
+ database_backup_retention_max_storage_locally:
+ type: integer
+ description: 'Max storage of the backup locally'
+ database_backup_retention_amount_s3:
+ type: integer
+ description: 'Retention amount of the backup in s3'
+ database_backup_retention_days_s3:
+ type: integer
+ description: 'Retention days of the backup in s3'
+ database_backup_retention_max_storage_s3:
+ type: integer
+ description: 'Max storage of the backup in S3'
+ type: object
+ responses:
+ '200':
+ description: 'Database backup configuration updated'
+ '401':
+ $ref: '#/components/responses/401'
+ '400':
+ $ref: '#/components/responses/400'
+ '404':
+ $ref: '#/components/responses/404'
+ security:
+ -
+ bearerAuth: []
/databases/postgresql:
post:
tags:
@@ -3094,6 +3260,102 @@ paths:
security:
-
bearerAuth: []
+ '/databases/{uuid}/backups/{scheduled_backup_uuid}/executions/{execution_uuid}':
+ delete:
+ tags:
+ - Databases
+ summary: 'Delete backup execution'
+ description: 'Deletes a specific backup execution.'
+ operationId: delete-backup-execution-by-uuid
+ parameters:
+ -
+ name: uuid
+ in: path
+ description: 'UUID of the database'
+ required: true
+ schema:
+ type: string
+ -
+ name: scheduled_backup_uuid
+ in: path
+ description: 'UUID of the backup configuration'
+ required: true
+ schema:
+ type: string
+ format: uuid
+ -
+ name: execution_uuid
+ in: path
+ description: 'UUID of the backup execution to delete'
+ required: true
+ schema:
+ type: string
+ format: uuid
+ -
+ name: delete_s3
+ in: query
+ description: 'Whether to delete the backup from S3'
+ required: false
+ schema:
+ type: boolean
+ default: false
+ responses:
+ '200':
+ description: 'Backup execution deleted.'
+ content:
+ application/json:
+ schema:
+ properties:
+ '': { type: string, example: 'Backup execution deleted.' }
+ type: object
+ '404':
+ description: 'Backup execution not found.'
+ content:
+ application/json:
+ schema:
+ properties:
+ '': { type: string, example: 'Backup execution not found.' }
+ type: object
+ security:
+ -
+ bearerAuth: []
+ '/databases/{uuid}/backups/{scheduled_backup_uuid}/executions':
+ get:
+ tags:
+ - Databases
+ summary: 'List backup executions'
+ description: 'Get all executions for a specific backup configuration.'
+ operationId: list-backup-executions
+ parameters:
+ -
+ name: uuid
+ in: path
+ description: 'UUID of the database'
+ required: true
+ schema:
+ type: string
+ -
+ name: scheduled_backup_uuid
+ in: path
+ description: 'UUID of the backup configuration'
+ required: true
+ schema:
+ type: string
+ format: uuid
+ responses:
+ '200':
+ description: 'List of backup executions'
+ content:
+ application/json:
+ schema:
+ properties:
+ '': { type: array, items: { properties: { uuid: { type: string }, filename: { type: string }, size: { type: integer }, created_at: { type: string }, message: { type: string }, status: { type: string } }, type: object } }
+ type: object
+ '404':
+ description: 'Backup configuration not found.'
+ security:
+ -
+ bearerAuth: []
'/databases/{uuid}/start':
get:
tags:
@@ -3348,6 +3610,300 @@ paths:
security:
-
bearerAuth: []
+ /github-apps:
+ post:
+ tags:
+ - 'GitHub Apps'
+ summary: 'Create GitHub App'
+ description: 'Create a new GitHub app.'
+ operationId: create-github-app
+ requestBody:
+ description: 'GitHub app creation payload.'
+ required: true
+ content:
+ application/json:
+ schema:
+ required:
+ - name
+ - api_url
+ - html_url
+ - app_id
+ - installation_id
+ - client_id
+ - client_secret
+ - private_key_uuid
+ properties:
+ name:
+ type: string
+ description: 'Name of the GitHub app.'
+ organization:
+ type: string
+ nullable: true
+ description: 'Organization to associate the app with.'
+ api_url:
+ type: string
+ description: 'API URL for the GitHub app (e.g., https://api.github.com).'
+ html_url:
+ type: string
+ description: 'HTML URL for the GitHub app (e.g., https://github.com).'
+ custom_user:
+ type: string
+ description: 'Custom user for SSH access (default: git).'
+ custom_port:
+ type: integer
+ description: 'Custom port for SSH access (default: 22).'
+ app_id:
+ type: integer
+ description: 'GitHub App ID from GitHub.'
+ installation_id:
+ type: integer
+ description: 'GitHub Installation ID.'
+ client_id:
+ type: string
+ description: 'GitHub OAuth App Client ID.'
+ client_secret:
+ type: string
+ description: 'GitHub OAuth App Client Secret.'
+ webhook_secret:
+ type: string
+ description: 'Webhook secret for GitHub webhooks.'
+ private_key_uuid:
+ type: string
+ description: 'UUID of an existing private key for GitHub App authentication.'
+ is_system_wide:
+ type: boolean
+ description: 'Is this app system-wide (cloud only).'
+ type: object
+ responses:
+ '201':
+ description: 'GitHub app created successfully.'
+ content:
+ application/json:
+ schema:
+ properties:
+ id: { type: integer }
+ uuid: { type: string }
+ name: { type: string }
+ organization: { type: string, nullable: true }
+ api_url: { type: string }
+ html_url: { type: string }
+ custom_user: { type: string }
+ custom_port: { type: integer }
+ app_id: { type: integer }
+ installation_id: { type: integer }
+ client_id: { type: string }
+ private_key_id: { type: integer }
+ is_system_wide: { type: boolean }
+ team_id: { type: integer }
+ type: object
+ '400':
+ $ref: '#/components/responses/400'
+ '401':
+ $ref: '#/components/responses/401'
+ '422':
+ $ref: '#/components/responses/422'
+ security:
+ -
+ bearerAuth: []
+ '/github-apps/{github_app_id}/repositories':
+ get:
+ tags:
+ - 'GitHub Apps'
+ summary: 'Load Repositories for a GitHub App'
+ description: 'Fetch repositories from GitHub for a given GitHub app.'
+ operationId: load-repositories
+ parameters:
+ -
+ name: github_app_id
+ in: path
+ description: 'GitHub App ID'
+ required: true
+ schema:
+ type: integer
+ responses:
+ '200':
+ description: 'Repositories loaded successfully.'
+ content:
+ application/json:
+ schema:
+ properties:
+ '': { type: array, items: { type: object } }
+ type: object
+ '400':
+ $ref: '#/components/responses/400'
+ '401':
+ $ref: '#/components/responses/401'
+ '404':
+ $ref: '#/components/responses/404'
+ security:
+ -
+ bearerAuth: []
+ '/github-apps/{github_app_id}/repositories/{owner}/{repo}/branches':
+ get:
+ tags:
+ - 'GitHub Apps'
+ summary: 'Load Branches for a GitHub Repository'
+ description: 'Fetch branches from GitHub for a given repository.'
+ operationId: load-branches
+ parameters:
+ -
+ name: github_app_id
+ in: path
+ description: 'GitHub App ID'
+ required: true
+ schema:
+ type: integer
+ -
+ name: owner
+ in: path
+ description: 'Repository owner'
+ required: true
+ schema:
+ type: string
+ -
+ name: repo
+ in: path
+ description: 'Repository name'
+ required: true
+ schema:
+ type: string
+ responses:
+ '200':
+ description: 'Branches loaded successfully.'
+ content:
+ application/json:
+ schema:
+ properties:
+ '': { type: array, items: { type: object } }
+ type: object
+ '400':
+ $ref: '#/components/responses/400'
+ '401':
+ $ref: '#/components/responses/401'
+ '404':
+ $ref: '#/components/responses/404'
+ security:
+ -
+ bearerAuth: []
+ '/github-apps/{github_app_id}':
+ delete:
+ tags:
+ - 'GitHub Apps'
+ summary: 'Delete GitHub App'
+ description: "Delete a GitHub app if it's not being used by any applications."
+ operationId: deleteGithubApp
+ parameters:
+ -
+ name: github_app_id
+ in: path
+ description: 'GitHub App ID'
+ required: true
+ schema:
+ type: integer
+ responses:
+ '200':
+ description: 'GitHub app deleted successfully'
+ content:
+ application/json:
+ schema:
+ properties:
+ message: { type: string, example: 'GitHub app deleted successfully' }
+ type: object
+ '401':
+ description: Unauthorized
+ '404':
+ description: 'GitHub app not found'
+ '409':
+ description: 'Conflict - GitHub app is in use'
+ content:
+ application/json:
+ schema:
+ properties:
+ message: { type: string, example: 'This GitHub app is being used by 5 application(s). Please delete all applications first.' }
+ type: object
+ security:
+ -
+ bearerAuth: []
+ patch:
+ tags:
+ - 'GitHub Apps'
+ summary: 'Update GitHub App'
+ description: 'Update an existing GitHub app.'
+ operationId: updateGithubApp
+ parameters:
+ -
+ name: github_app_id
+ in: path
+ description: 'GitHub App ID'
+ required: true
+ schema:
+ type: integer
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ properties:
+ name:
+ type: string
+ description: 'GitHub App name'
+ organization:
+ type: string
+ nullable: true
+ description: 'GitHub organization'
+ api_url:
+ type: string
+ description: 'GitHub API URL'
+ html_url:
+ type: string
+ description: 'GitHub HTML URL'
+ custom_user:
+ type: string
+ description: 'Custom user for SSH'
+ custom_port:
+ type: integer
+ description: 'Custom port for SSH'
+ app_id:
+ type: integer
+ description: 'GitHub App ID'
+ installation_id:
+ type: integer
+ description: 'GitHub Installation ID'
+ client_id:
+ type: string
+ description: 'GitHub Client ID'
+ client_secret:
+ type: string
+ description: 'GitHub Client Secret'
+ webhook_secret:
+ type: string
+ description: 'GitHub Webhook Secret'
+ private_key_uuid:
+ type: string
+ description: 'Private key UUID'
+ is_system_wide:
+ type: boolean
+ description: 'Is system wide (non-cloud instances only)'
+ type: object
+ responses:
+ '200':
+ description: 'GitHub app updated successfully'
+ content:
+ application/json:
+ schema:
+ properties:
+ message: { type: string, example: 'GitHub app updated successfully' }
+ data: { type: object, description: 'Updated GitHub app data' }
+ type: object
+ '401':
+ description: Unauthorized
+ '404':
+ description: 'GitHub app not found'
+ '422':
+ description: 'Validation error'
+ security:
+ -
+ bearerAuth: []
/version:
get:
summary: Version
@@ -5781,6 +6337,9 @@ tags:
-
name: Deployments
description: Deployments
+ -
+ name: 'GitHub Apps'
+ description: 'GitHub Apps'
-
name: Projects
description: Projects
diff --git a/other/nightly/docker-compose.prod.yml b/other/nightly/docker-compose.prod.yml
index 57f062202..b90f126a2 100644
--- a/other/nightly/docker-compose.prod.yml
+++ b/other/nightly/docker-compose.prod.yml
@@ -61,7 +61,7 @@ services:
retries: 10
timeout: 2s
soketi:
- image: '${REGISTRY_URL:-ghcr.io}/coollabsio/coolify-realtime:1.0.9'
+ image: '${REGISTRY_URL:-ghcr.io}/coollabsio/coolify-realtime:1.0.10'
ports:
- "${SOKETI_PORT:-6001}:6001"
- "6002:6002"
diff --git a/other/nightly/docker-compose.windows.yml b/other/nightly/docker-compose.windows.yml
index e19ec961f..09ce3ead3 100644
--- a/other/nightly/docker-compose.windows.yml
+++ b/other/nightly/docker-compose.windows.yml
@@ -103,7 +103,7 @@ services:
retries: 10
timeout: 2s
soketi:
- image: 'ghcr.io/coollabsio/coolify-realtime:1.0.0'
+ image: 'ghcr.io/coollabsio/coolify-realtime:1.0.10'
pull_policy: always
container_name: coolify-realtime
restart: always
diff --git a/other/nightly/install.sh b/other/nightly/install.sh
index 92ad12302..bcd37e71f 100755
--- a/other/nightly/install.sh
+++ b/other/nightly/install.sh
@@ -20,7 +20,6 @@ DATE=$(date +"%Y%m%d-%H%M%S")
OS_TYPE=$(grep -w "ID" /etc/os-release | cut -d "=" -f 2 | tr -d '"')
ENV_FILE="/data/coolify/source/.env"
-VERSION="21"
DOCKER_VERSION="27.0"
# TODO: Ask for a user
CURRENT_USER=$USER
@@ -32,7 +31,7 @@ fi
echo -e "Welcome to Coolify Installer!"
echo -e "This script will install everything for you. Sit back and relax."
-echo -e "Source code: https://github.com/coollabsio/coolify/blob/main/scripts/install.sh\n"
+echo -e "Source code: https://github.com/coollabsio/coolify/blob/v4.x/scripts/install.sh"
# Predefined root user
ROOT_USERNAME=${ROOT_USERNAME:-}
@@ -711,84 +710,80 @@ curl -fsSL $CDN/docker-compose.prod.yml -o /data/coolify/source/docker-compose.p
curl -fsSL $CDN/.env.production -o /data/coolify/source/.env.production
curl -fsSL $CDN/upgrade.sh -o /data/coolify/source/upgrade.sh
-echo -e "6. Make backup of .env to .env-$DATE"
+echo -e "6. Setting up environment variable file"
-# Copy .env.example if .env does not exist
-if [ -f $ENV_FILE ]; then
- cp $ENV_FILE $ENV_FILE-$DATE
+if [ -f "$ENV_FILE" ]; then
+ # If .env exists, create backup
+ echo " - Creating backup of existing .env file to .env-$DATE"
+ cp "$ENV_FILE" "$ENV_FILE-$DATE"
+ # Merge .env.production values into .env
+ echo " - Merging .env.production values into .env"
+ awk -F '=' '!seen[$1]++' "$ENV_FILE" "/data/coolify/source/.env.production" > "$ENV_FILE.tmp" && mv "$ENV_FILE.tmp" "$ENV_FILE"
+ echo " - .env file merged successfully"
else
- echo " - File does not exist: $ENV_FILE"
- echo " - Copying .env.production to .env-$DATE"
- cp /data/coolify/source/.env.production $ENV_FILE-$DATE
- # Generate a secure APP_ID and APP_KEY
- sed -i "s|^APP_ID=.*|APP_ID=$(openssl rand -hex 16)|" "$ENV_FILE-$DATE"
- sed -i "s|^APP_KEY=.*|APP_KEY=base64:$(openssl rand -base64 32)|" "$ENV_FILE-$DATE"
-
- # Generate a secure Postgres DB username and password
- # Causes issues: database "random-user" does not exist
- # sed -i "s|^DB_USERNAME=.*|DB_USERNAME=$(openssl rand -hex 16)|" "$ENV_FILE-$DATE"
- sed -i "s|^DB_PASSWORD=.*|DB_PASSWORD=$(openssl rand -base64 32)|" "$ENV_FILE-$DATE"
-
- # Generate a secure Redis password
- sed -i "s|^REDIS_PASSWORD=.*|REDIS_PASSWORD=$(openssl rand -base64 32)|" "$ENV_FILE-$DATE"
-
- # Generate secure Pusher credentials
- sed -i "s|^PUSHER_APP_ID=.*|PUSHER_APP_ID=$(openssl rand -hex 32)|" "$ENV_FILE-$DATE"
- sed -i "s|^PUSHER_APP_KEY=.*|PUSHER_APP_KEY=$(openssl rand -hex 32)|" "$ENV_FILE-$DATE"
- sed -i "s|^PUSHER_APP_SECRET=.*|PUSHER_APP_SECRET=$(openssl rand -hex 32)|" "$ENV_FILE-$DATE"
+ # If no .env exists, copy .env.production to .env
+ echo " - No .env file found, copying .env.production to .env"
+ cp "/data/coolify/source/.env.production" "$ENV_FILE"
fi
+echo -e "7. Checking and updating environment variables if necessary..."
+
+update_env_var() {
+ local key="$1"
+ local value="$2"
+
+ # If variable "key=" exists but has no value, update the value of the existing line
+ if grep -q "^${key}=$" "$ENV_FILE"; then
+ sed -i "s|^${key}=$|${key}=${value}|" "$ENV_FILE"
+ echo " - Updated value of ${key} as the current value was empty"
+ # If variable "key=" doesn't exist, append it to the file with value
+ elif ! grep -q "^${key}=" "$ENV_FILE"; then
+ printf '%s=%s\n' "$key" "$value" >>"$ENV_FILE"
+        echo " - Added ${key} and its value as the variable was missing"
+ fi
+}
+
+update_env_var "APP_ID" "$(openssl rand -hex 16)"
+update_env_var "APP_KEY" "base64:$(openssl rand -base64 32)"
+# update_env_var "DB_USERNAME" "$(openssl rand -hex 16)" # Causes issues: database "random-user" does not exist
+update_env_var "DB_PASSWORD" "$(openssl rand -base64 32)"
+update_env_var "REDIS_PASSWORD" "$(openssl rand -base64 32)"
+update_env_var "PUSHER_APP_ID" "$(openssl rand -hex 32)"
+update_env_var "PUSHER_APP_KEY" "$(openssl rand -hex 32)"
+update_env_var "PUSHER_APP_SECRET" "$(openssl rand -hex 32)"
+
# Add default root user credentials from environment variables
if [ -n "$ROOT_USERNAME" ] && [ -n "$ROOT_USER_EMAIL" ] && [ -n "$ROOT_USER_PASSWORD" ]; then
- if grep -q "^ROOT_USERNAME=" "$ENV_FILE-$DATE"; then
- sed -i "s|^ROOT_USERNAME=.*|ROOT_USERNAME=$ROOT_USERNAME|" "$ENV_FILE-$DATE"
- fi
- if grep -q "^ROOT_USER_EMAIL=" "$ENV_FILE-$DATE"; then
- sed -i "s|^ROOT_USER_EMAIL=.*|ROOT_USER_EMAIL=$ROOT_USER_EMAIL|" "$ENV_FILE-$DATE"
- fi
- if grep -q "^ROOT_USER_PASSWORD=" "$ENV_FILE-$DATE"; then
- sed -i "s|^ROOT_USER_PASSWORD=.*|ROOT_USER_PASSWORD=$ROOT_USER_PASSWORD|" "$ENV_FILE-$DATE"
- fi
+ echo " - Setting predefined root user credentials from environment"
+ update_env_var "ROOT_USERNAME" "$ROOT_USERNAME"
+ update_env_var "ROOT_USER_EMAIL" "$ROOT_USER_EMAIL"
+ update_env_var "ROOT_USER_PASSWORD" "$ROOT_USER_PASSWORD"
fi
-# Add registry URL to .env file
if [ -n "${REGISTRY_URL+x}" ]; then
# Only update if REGISTRY_URL was explicitly provided
- if grep -q "^REGISTRY_URL=" "$ENV_FILE-$DATE"; then
- sed -i "s|^REGISTRY_URL=.*|REGISTRY_URL=$REGISTRY_URL|" "$ENV_FILE-$DATE"
- else
- echo "REGISTRY_URL=$REGISTRY_URL" >>"$ENV_FILE-$DATE"
- fi
+ update_env_var "REGISTRY_URL" "$REGISTRY_URL"
fi
-# Merge .env and .env.production. New values will be added to .env
-echo -e "7. Propagating .env with new values - if necessary."
-awk -F '=' '!seen[$1]++' "$ENV_FILE-$DATE" /data/coolify/source/.env.production >$ENV_FILE
-
if [ "$AUTOUPDATE" = "false" ]; then
- if ! grep -q "AUTOUPDATE=" /data/coolify/source/.env; then
- echo "AUTOUPDATE=false" >>/data/coolify/source/.env
- else
- sed -i "s|AUTOUPDATE=.*|AUTOUPDATE=false|g" /data/coolify/source/.env
+ update_env_var "AUTOUPDATE" "false"
+fi
+
+if [ "$DOCKER_POOL_BASE_PROVIDED" = true ]; then
+ update_env_var "DOCKER_ADDRESS_POOL_BASE" "$DOCKER_ADDRESS_POOL_BASE"
+else
+ # Add with default value if missing
+ if ! grep -q "^DOCKER_ADDRESS_POOL_BASE=" "$ENV_FILE"; then
+ update_env_var "DOCKER_ADDRESS_POOL_BASE" "$DOCKER_ADDRESS_POOL_BASE"
fi
fi
-# Save Docker address pool configuration to .env file
-if ! grep -q "DOCKER_ADDRESS_POOL_BASE=" /data/coolify/source/.env; then
- echo "DOCKER_ADDRESS_POOL_BASE=$DOCKER_ADDRESS_POOL_BASE" >>/data/coolify/source/.env
+if [ "$DOCKER_POOL_SIZE_PROVIDED" = true ]; then
+ update_env_var "DOCKER_ADDRESS_POOL_SIZE" "$DOCKER_ADDRESS_POOL_SIZE"
else
- # Only update if explicitly provided
- if [ "$DOCKER_POOL_BASE_PROVIDED" = true ]; then
- sed -i "s|DOCKER_ADDRESS_POOL_BASE=.*|DOCKER_ADDRESS_POOL_BASE=$DOCKER_ADDRESS_POOL_BASE|g" /data/coolify/source/.env
- fi
-fi
-
-if ! grep -q "DOCKER_ADDRESS_POOL_SIZE=" /data/coolify/source/.env; then
- echo "DOCKER_ADDRESS_POOL_SIZE=$DOCKER_ADDRESS_POOL_SIZE" >>/data/coolify/source/.env
-else
- # Only update if explicitly provided
- if [ "$DOCKER_POOL_SIZE_PROVIDED" = true ]; then
- sed -i "s|DOCKER_ADDRESS_POOL_SIZE=.*|DOCKER_ADDRESS_POOL_SIZE=$DOCKER_ADDRESS_POOL_SIZE|g" /data/coolify/source/.env
+ # Add with default value if missing
+ if ! grep -q "^DOCKER_ADDRESS_POOL_SIZE=" "$ENV_FILE"; then
+ update_env_var "DOCKER_ADDRESS_POOL_SIZE" "$DOCKER_ADDRESS_POOL_SIZE"
fi
fi
@@ -824,14 +819,13 @@ echo -e " - Please wait."
getAJoke
if [[ $- == *x* ]]; then
- bash -x /data/coolify/source/upgrade.sh "${LATEST_VERSION:-latest}" "${LATEST_HELPER_VERSION:-latest}" "${REGISTRY_URL:-ghcr.io}"
+ bash -x /data/coolify/source/upgrade.sh "${LATEST_VERSION:-latest}" "${LATEST_HELPER_VERSION:-latest}" "${REGISTRY_URL:-ghcr.io}" "true"
else
- bash /data/coolify/source/upgrade.sh "${LATEST_VERSION:-latest}" "${LATEST_HELPER_VERSION:-latest}" "${REGISTRY_URL:-ghcr.io}"
+ bash /data/coolify/source/upgrade.sh "${LATEST_VERSION:-latest}" "${LATEST_HELPER_VERSION:-latest}" "${REGISTRY_URL:-ghcr.io}" "true"
fi
echo " - Coolify installed successfully."
-rm -f $ENV_FILE-$DATE
-echo " - Waiting for 20 seconds for Coolify (database migrations) to be ready."
+echo " - Waiting 20 seconds for Coolify database migrations to complete."
getAJoke
sleep 20
@@ -868,5 +862,5 @@ if [ -n "$PRIVATE_IPS" ]; then
fi
done
fi
+
echo -e "\nWARNING: It is highly recommended to backup your Environment variables file (/data/coolify/source/.env) to a safe location, outside of this server (e.g. into a Password Manager).\n"
-cp /data/coolify/source/.env /data/coolify/source/.env.backup
diff --git a/other/nightly/upgrade.sh b/other/nightly/upgrade.sh
index 0b031ca75..14eede4ee 100644
--- a/other/nightly/upgrade.sh
+++ b/other/nightly/upgrade.sh
@@ -1,11 +1,12 @@
#!/bin/bash
## Do not modify this file. You will lose the ability to autoupdate!
-VERSION="15"
CDN="https://cdn.coollabs.io/coolify-nightly"
LATEST_IMAGE=${1:-latest}
LATEST_HELPER_VERSION=${2:-latest}
REGISTRY_URL=${3:-ghcr.io}
+SKIP_BACKUP=${4:-false}
+ENV_FILE="/data/coolify/source/.env"
DATE=$(date +%Y-%m-%d-%H-%M-%S)
LOGFILE="/data/coolify/source/upgrade-${DATE}.log"
@@ -14,20 +15,39 @@ curl -fsSL $CDN/docker-compose.yml -o /data/coolify/source/docker-compose.yml
curl -fsSL $CDN/docker-compose.prod.yml -o /data/coolify/source/docker-compose.prod.yml
curl -fsSL $CDN/.env.production -o /data/coolify/source/.env.production
-# Merge .env and .env.production. New values will be added to .env
-awk -F '=' '!seen[$1]++' /data/coolify/source/.env /data/coolify/source/.env.production >/data/coolify/source/.env.tmp && mv /data/coolify/source/.env.tmp /data/coolify/source/.env
-# Check if PUSHER_APP_ID or PUSHER_APP_KEY or PUSHER_APP_SECRET is empty in /data/coolify/source/.env
-if grep -q "PUSHER_APP_ID=$" /data/coolify/source/.env; then
- sed -i "s|PUSHER_APP_ID=.*|PUSHER_APP_ID=$(openssl rand -hex 32)|g" /data/coolify/source/.env
+# Backup existing .env file before making any changes
+if [ "$SKIP_BACKUP" != "true" ]; then
+ if [ -f "$ENV_FILE" ]; then
+ echo "Creating backup of existing .env file to .env-$DATE" >>"$LOGFILE"
+ cp "$ENV_FILE" "$ENV_FILE-$DATE"
+ else
+ echo "No existing .env file found to backup" >>"$LOGFILE"
+ fi
fi
-if grep -q "PUSHER_APP_KEY=$" /data/coolify/source/.env; then
- sed -i "s|PUSHER_APP_KEY=.*|PUSHER_APP_KEY=$(openssl rand -hex 32)|g" /data/coolify/source/.env
-fi
+echo "Merging .env.production values into .env" >>"$LOGFILE"
+awk -F '=' '!seen[$1]++' "$ENV_FILE" /data/coolify/source/.env.production > "$ENV_FILE.tmp" && mv "$ENV_FILE.tmp" "$ENV_FILE"
+echo ".env file merged successfully" >>"$LOGFILE"
-if grep -q "PUSHER_APP_SECRET=$" /data/coolify/source/.env; then
- sed -i "s|PUSHER_APP_SECRET=.*|PUSHER_APP_SECRET=$(openssl rand -hex 32)|g" /data/coolify/source/.env
-fi
+update_env_var() {
+ local key="$1"
+ local value="$2"
+
+ # If variable "key=" exists but has no value, update the value of the existing line
+ if grep -q "^${key}=$" "$ENV_FILE"; then
+ sed -i "s|^${key}=$|${key}=${value}|" "$ENV_FILE"
+ echo " - Updated value of ${key} as the current value was empty" >>"$LOGFILE"
+ # If variable "key=" doesn't exist, append it to the file with value
+ elif ! grep -q "^${key}=" "$ENV_FILE"; then
+ printf '%s=%s\n' "$key" "$value" >>"$ENV_FILE"
+ echo " - Added ${key} with default value as the variable was missing" >>"$LOGFILE"
+ fi
+}
+
+echo "Checking and updating environment variables if necessary..." >>"$LOGFILE"
+update_env_var "PUSHER_APP_ID" "$(openssl rand -hex 32)"
+update_env_var "PUSHER_APP_KEY" "$(openssl rand -hex 32)"
+update_env_var "PUSHER_APP_SECRET" "$(openssl rand -hex 32)"
# Make sure coolify network exists
# It is created when starting Coolify with docker compose
@@ -37,11 +57,16 @@ if ! docker network inspect coolify >/dev/null 2>&1; then
docker network create --attachable coolify 2>/dev/null
fi
fi
-# docker network create --attachable --driver=overlay coolify-overlay 2>/dev/null
+
+# Check if Docker config file exists
+DOCKER_CONFIG_MOUNT=""
+if [ -f /root/.docker/config.json ]; then
+ DOCKER_CONFIG_MOUNT="-v /root/.docker/config.json:/root/.docker/config.json"
+fi
if [ -f /data/coolify/source/docker-compose.custom.yml ]; then
- echo "docker-compose.custom.yml detected." >>$LOGFILE
- docker run -v /data/coolify/source:/data/coolify/source -v /var/run/docker.sock:/var/run/docker.sock --rm ${REGISTRY_URL:-ghcr.io}/coollabsio/coolify-helper:${LATEST_HELPER_VERSION} bash -c "LATEST_IMAGE=${LATEST_IMAGE} docker compose --env-file /data/coolify/source/.env -f /data/coolify/source/docker-compose.yml -f /data/coolify/source/docker-compose.prod.yml -f /data/coolify/source/docker-compose.custom.yml up -d --remove-orphans --force-recreate --wait --wait-timeout 60" >>$LOGFILE 2>&1
+ echo "docker-compose.custom.yml detected." >>"$LOGFILE"
+ docker run -v /data/coolify/source:/data/coolify/source -v /var/run/docker.sock:/var/run/docker.sock ${DOCKER_CONFIG_MOUNT} --rm ${REGISTRY_URL:-ghcr.io}/coollabsio/coolify-helper:${LATEST_HELPER_VERSION} bash -c "LATEST_IMAGE=${LATEST_IMAGE} docker compose --env-file /data/coolify/source/.env -f /data/coolify/source/docker-compose.yml -f /data/coolify/source/docker-compose.prod.yml -f /data/coolify/source/docker-compose.custom.yml up -d --remove-orphans --force-recreate --wait --wait-timeout 60" >>"$LOGFILE" 2>&1
else
- docker run -v /data/coolify/source:/data/coolify/source -v /var/run/docker.sock:/var/run/docker.sock --rm ${REGISTRY_URL:-ghcr.io}/coollabsio/coolify-helper:${LATEST_HELPER_VERSION} bash -c "LATEST_IMAGE=${LATEST_IMAGE} docker compose --env-file /data/coolify/source/.env -f /data/coolify/source/docker-compose.yml -f /data/coolify/source/docker-compose.prod.yml up -d --remove-orphans --force-recreate --wait --wait-timeout 60" >>$LOGFILE 2>&1
+ docker run -v /data/coolify/source:/data/coolify/source -v /var/run/docker.sock:/var/run/docker.sock ${DOCKER_CONFIG_MOUNT} --rm ${REGISTRY_URL:-ghcr.io}/coollabsio/coolify-helper:${LATEST_HELPER_VERSION} bash -c "LATEST_IMAGE=${LATEST_IMAGE} docker compose --env-file /data/coolify/source/.env -f /data/coolify/source/docker-compose.yml -f /data/coolify/source/docker-compose.prod.yml up -d --remove-orphans --force-recreate --wait --wait-timeout 60" >>"$LOGFILE" 2>&1
fi
diff --git a/other/nightly/versions.json b/other/nightly/versions.json
index fd5dccaf0..2e5cc5e84 100644
--- a/other/nightly/versions.json
+++ b/other/nightly/versions.json
@@ -1,10 +1,10 @@
{
"coolify": {
"v4": {
- "version": "4.0.0-beta.428"
+ "version": "4.0.0-beta.435"
},
"nightly": {
- "version": "4.0.0-beta.429"
+ "version": "4.0.0-beta.436"
},
"helper": {
"version": "1.0.11"
diff --git a/public/coolify-logo-dev-transparent.png b/public/coolify-logo-dev-transparent.png
index 9beeb9ba3..4e65e8b72 100644
Binary files a/public/coolify-logo-dev-transparent.png and b/public/coolify-logo-dev-transparent.png differ
diff --git a/public/coolify-logo-dev-transparent.svg b/public/coolify-logo-dev-transparent.svg
new file mode 100644
index 000000000..a4159154f
--- /dev/null
+++ b/public/coolify-logo-dev-transparent.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/public/coolify-logo-monochrome.png b/public/coolify-logo-monochrome.png
new file mode 100644
index 000000000..48605e8fd
Binary files /dev/null and b/public/coolify-logo-monochrome.png differ
diff --git a/public/coolify-logo-monochrome.svg b/public/coolify-logo-monochrome.svg
new file mode 100644
index 000000000..f60f33f97
--- /dev/null
+++ b/public/coolify-logo-monochrome.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/public/coolify-logo-red.png b/public/coolify-logo-red.png
new file mode 100644
index 000000000..b3f7d2b6c
Binary files /dev/null and b/public/coolify-logo-red.png differ
diff --git a/public/coolify-logo-red.svg b/public/coolify-logo-red.svg
new file mode 100644
index 000000000..4cbfef43f
--- /dev/null
+++ b/public/coolify-logo-red.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/public/coolify-logo.svg b/public/coolify-logo.svg
index 6f4f641f5..bff8f6b40 100644
--- a/public/coolify-logo.svg
+++ b/public/coolify-logo.svg
@@ -1,9 +1 @@
-
-
+
\ No newline at end of file
diff --git a/public/coolify-transparent.png b/public/coolify-transparent.png
index 96fc0db36..99a56acbe 100644
Binary files a/public/coolify-transparent.png and b/public/coolify-transparent.png differ
diff --git a/public/ente-photos-icon-green.png b/public/ente-photos-icon-green.png
new file mode 100644
index 000000000..b74aa472d
Binary files /dev/null and b/public/ente-photos-icon-green.png differ
diff --git a/public/svgs/ente-photos.svg b/public/svgs/ente-photos.svg
new file mode 100644
index 000000000..e6a469e91
--- /dev/null
+++ b/public/svgs/ente-photos.svg
@@ -0,0 +1,15 @@
+
diff --git a/public/svgs/ente.png b/public/svgs/ente.png
new file mode 100644
index 000000000..f510a7bf7
Binary files /dev/null and b/public/svgs/ente.png differ
diff --git a/resources/css/app.css b/resources/css/app.css
index 77fa2d66b..c1dc7e56d 100644
--- a/resources/css/app.css
+++ b/resources/css/app.css
@@ -20,8 +20,11 @@ @theme {
--color-warning: #fcd452;
--color-success: #16a34a;
--color-error: #dc2626;
+ --color-coollabs-50: #f5f0ff;
--color-coollabs: #6b16ed;
--color-coollabs-100: #7317ff;
+ --color-coollabs-200: #5a12c7;
+ --color-coollabs-300: #4a0fa3;
--color-coolgray-100: #181818;
--color-coolgray-200: #202020;
--color-coolgray-300: #242424;
@@ -91,11 +94,11 @@ option {
}
button[isError]:not(:disabled) {
- @apply text-white bg-red-600 hover:bg-red-700;
+ @apply text-red-800 dark:text-red-300 bg-red-50 dark:bg-red-900/30 border-red-300 dark:border-red-800 hover:bg-red-300 hover:text-white dark:hover:bg-red-800 dark:hover:text-white;
}
button[isHighlighted]:not(:disabled) {
- @apply text-white bg-coollabs hover:bg-coollabs-100;
+ @apply text-coollabs-200 dark:text-white bg-coollabs-50 dark:bg-coollabs/20 border-coollabs dark:border-coollabs-100 hover:bg-coollabs hover:text-white dark:hover:bg-coollabs-100 dark:hover:text-white;
}
h1 {
@@ -118,6 +121,11 @@ a {
@apply hover:text-black dark:hover:text-white;
}
+button:focus-visible,
+a:focus-visible {
+ @apply outline-none ring-2 ring-coollabs dark:ring-warning ring-offset-2 dark:ring-offset-coolgray-100;
+}
+
label {
@apply dark:text-neutral-400;
}
diff --git a/resources/css/utilities.css b/resources/css/utilities.css
index d09d7f49c..bedfb51bc 100644
--- a/resources/css/utilities.css
+++ b/resources/css/utilities.css
@@ -6,10 +6,31 @@ @utility apexcharts-tooltip-title {
@apply hidden!;
}
+@utility apexcharts-grid-borders {
+ @apply dark:hidden!;
+}
+
@utility apexcharts-xaxistooltip {
@apply hidden!;
}
+@utility apexcharts-tooltip-custom {
+ @apply bg-white dark:bg-coolgray-100 border border-neutral-200 dark:border-coolgray-300 rounded-lg shadow-lg p-3 text-sm;
+ min-width: 160px;
+}
+
+@utility apexcharts-tooltip-custom-value {
+ @apply text-neutral-700 dark:text-neutral-300 mb-1;
+}
+
+@utility apexcharts-tooltip-value-bold {
+ @apply font-bold text-black dark:text-white;
+}
+
+@utility apexcharts-tooltip-custom-title {
+ @apply text-xs text-neutral-500 dark:text-neutral-400 font-medium;
+}
+
@utility input-sticky {
@apply block py-1.5 w-full text-sm text-black rounded-sm border-0 ring-1 ring-inset dark:bg-coolgray-100 dark:text-white ring-neutral-200 dark:ring-coolgray-300 focus:ring-2 focus:ring-neutral-400 dark:focus:ring-coolgray-300;
}
@@ -42,7 +63,7 @@ @utility select {
}
@utility button {
- @apply flex gap-2 justify-center items-center px-2 py-1 text-sm text-black normal-case rounded-sm border outline-0 cursor-pointer bg-neutral-200/50 border-neutral-300 hover:bg-neutral-300 dark:bg-coolgray-200 dark:text-white dark:hover:text-white dark:hover:bg-coolgray-500 dark:border-coolgray-300 hover:text-black disabled:cursor-not-allowed min-w-fit dark:disabled:text-neutral-600 disabled:border-transparent disabled:hover:bg-transparent disabled:bg-transparent disabled:text-neutral-300;
+ @apply flex gap-2 justify-center items-center px-2 h-8 text-sm text-black normal-case rounded-sm border-2 outline-0 cursor-pointer font-medium bg-white border-neutral-200 hover:bg-neutral-100 dark:bg-coolgray-100 dark:text-white dark:hover:text-white dark:hover:bg-coolgray-200 dark:border-coolgray-300 hover:text-black disabled:cursor-not-allowed min-w-fit dark:disabled:text-neutral-600 disabled:border-transparent disabled:hover:bg-transparent disabled:bg-transparent disabled:text-neutral-300 focus-visible:ring-2 focus-visible:ring-coollabs dark:focus-visible:ring-warning focus-visible:ring-offset-2 dark:focus-visible:ring-offset-coolgray-100;
}
@utility alert-success {
@@ -62,11 +83,11 @@ @utility add-tag {
}
@utility dropdown-item {
- @apply flex relative gap-2 justify-start items-center py-1 pr-4 pl-2 w-full text-xs transition-colors cursor-pointer select-none dark:text-white hover:bg-neutral-100 dark:hover:bg-coollabs outline-none data-disabled:pointer-events-none data-disabled:opacity-50;
+ @apply flex relative gap-2 justify-start items-center py-1 pr-4 pl-2 w-full text-xs transition-colors cursor-pointer select-none dark:text-white hover:bg-neutral-100 dark:hover:bg-coollabs outline-none data-disabled:pointer-events-none data-disabled:opacity-50 focus-visible:bg-neutral-100 dark:focus-visible:bg-coollabs;
}
@utility dropdown-item-no-padding {
- @apply flex relative gap-2 justify-start items-center py-1 w-full text-xs transition-colors cursor-pointer select-none dark:text-white hover:bg-neutral-100 dark:hover:bg-coollabs outline-none data-disabled:pointer-events-none data-disabled:opacity-50;
+ @apply flex relative gap-2 justify-start items-center py-1 w-full text-xs transition-colors cursor-pointer select-none dark:text-white hover:bg-neutral-100 dark:hover:bg-coollabs outline-none data-disabled:pointer-events-none data-disabled:opacity-50 focus-visible:bg-neutral-100 dark:focus-visible:bg-coollabs;
}
@utility badge {
@@ -134,15 +155,15 @@ @utility kbd-custom {
}
@utility box {
- @apply relative flex lg:flex-row flex-col p-2 transition-colors cursor-pointer min-h-[4rem] dark:bg-coolgray-100 shadow-sm bg-white border text-black dark:text-white hover:text-black border-neutral-200 dark:border-black hover:bg-neutral-100 dark:hover:bg-coollabs-100 dark:hover:text-white hover:no-underline;
+ @apply relative flex lg:flex-row flex-col p-2 transition-colors cursor-pointer min-h-[4rem] dark:bg-coolgray-100 shadow-sm bg-white border text-black dark:text-white hover:text-black border-neutral-200 dark:border-coolgray-300 hover:bg-neutral-100 dark:hover:bg-coollabs-100 dark:hover:text-white hover:no-underline rounded-sm;
}
@utility box-boarding {
- @apply flex lg:flex-row flex-col p-2 transition-colors cursor-pointer min-h-[4rem] dark:bg-coolgray-100 dark:text-white bg-neutral-50 border border-neutral-200 dark:border-black hover:bg-neutral-100 dark:hover:bg-coollabs-100 dark:hover:text-white hover:text-black hover:no-underline text-black;
+ @apply flex lg:flex-row flex-col p-2 transition-colors cursor-pointer min-h-[4rem] dark:bg-coolgray-100 dark:text-white bg-neutral-50 border border-neutral-200 dark:border-coolgray-300 hover:bg-neutral-100 dark:hover:bg-coollabs-100 dark:hover:text-white hover:text-black hover:no-underline text-black rounded-sm;
}
@utility box-without-bg {
- @apply flex p-2 transition-colors dark:hover:text-white hover:no-underline min-h-[4rem] border border-neutral-200 dark:border-black;
+ @apply flex p-2 transition-colors dark:hover:text-white hover:no-underline min-h-[4rem] border border-neutral-200 dark:border-coolgray-300 rounded-sm;
}
@utility box-without-bg-without-border {
@@ -178,7 +199,7 @@ @utility info-helper {
}
@utility info-helper-popup {
- @apply hidden absolute z-40 text-xs rounded-sm text-neutral-700 group-hover:block dark:border-coolgray-500 border-neutral-900 dark:bg-coolgray-400 bg-neutral-200 dark:text-neutral-300;
+ @apply hidden absolute z-40 text-xs rounded-sm text-neutral-700 group-hover:block dark:border-coolgray-500 border-neutral-900 dark:bg-coolgray-400 bg-neutral-200 dark:text-neutral-300 max-w-xs whitespace-normal break-words;
}
@utility buyme {
diff --git a/resources/views/components/applications/advanced.blade.php b/resources/views/components/applications/advanced.blade.php
index 46ea54e99..e36583741 100644
--- a/resources/views/components/applications/advanced.blade.php
+++ b/resources/views/components/applications/advanced.blade.php
@@ -19,7 +19,7 @@
@else
-
-
-
Warning: Domain Conflict Detected
-
{{ $slot ?? 'The following domain(s) are already in use by other resources. Using the same domain for multiple resources can cause routing conflicts and unpredictable behavior.' }}
-
-
+
+ The following domain(s) are already in use by other resources. Using the same domain for
+ multiple resources can cause routing conflicts and unpredictable behavior.
+