-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy path.env.example
More file actions
123 lines (87 loc) · 4.21 KB
/
.env.example
File metadata and controls
123 lines (87 loc) · 4.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
# APFlow Environment Variables Configuration
# Copy this file to .env and fill in your actual values
# =============================================================================
# API SERVER CONFIGURATION
# =============================================================================
# Host for the API server (default: 0.0.0.0)
APFLOW_API_HOST=0.0.0.0
# Port for the API server (default: 8000)
APFLOW_API_PORT=8000
# Enable system routes (default: true)
APFLOW_ENABLE_SYSTEM_ROUTES=true
# Enable API documentation (default: true)
APFLOW_ENABLE_DOCS=true
# =============================================================================
# AUTHENTICATION
# =============================================================================
# JWT secret key for token signing (REQUIRED for API authentication)
# Generate a secure random string for production
APFLOW_JWT_SECRET=your_jwt_secret_key_here
# JWT algorithm (default: HS256)
APFLOW_JWT_ALGORITHM=HS256
# =============================================================================
# DATABASE CONFIGURATION
# =============================================================================
# Database URL (supports PostgreSQL, DuckDB, etc.)
# For PostgreSQL: postgresql://user:password@localhost:5432/apflow
# For DuckDB: duckdb:///.data/apflow.duckdb
APFLOW_DATABASE_URL=duckdb:///.data/apflow.duckdb
# Alternative test database URL for APFlow tests
APFLOW_TEST_DATABASE_URL=
# Task table name in database (default: apflow_tasks)
APFLOW_TASK_TABLE_NAME=apflow_tasks
# Maximum number of database sessions (default: 50)
APFLOW_MAX_SESSIONS=50
# Database session timeout in seconds (default: 1800)
APFLOW_SESSION_TIMEOUT=1800
# =============================================================================
# LOGGING
# =============================================================================
# Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
APFLOW_LOG_LEVEL=INFO
# Alternative log level setting
LOG_LEVEL=INFO
# =============================================================================
# EXTENSIONS AND FEATURES
# =============================================================================
# Enabled extensions by directory name (comma-separated: stdio,http,crewai,llm,etc.)
# This loads ALL executors in the specified extension directories
# Examples:
# APFLOW_EXTENSIONS=stdio -> Loads system_info_executor, command_executor
# APFLOW_EXTENSIONS=stdio,http -> Loads all stdio and http executors
# APFLOW_EXTENSIONS=stdio,http,crewai -> Loads multiple extension directories
APFLOW_EXTENSIONS=stdio,http
# Enabled executors by specific ID (comma-separated: system_info_executor,rest_executor,etc.)
# This loads ONLY the specified executors, regardless of their extension directory
# Use this for fine-grained control when you want specific executors only
# Example:
# APFLOW_EXTENSIONS_IDS=system_info_executor,rest_executor
# APFLOW_EXTENSIONS_IDS=
# Note: Both APFLOW_EXTENSIONS and APFLOW_EXTENSIONS_IDS can be used together
# The result is the union of both configurations
# If neither is set, all available executors are enabled (no restrictions)
# Allowed commands for STDIO extension (comma-separated)
# Command execution is DISABLED by default for security reasons
APFLOW_STDIO_ALLOW_COMMAND=python,node,npm,yarn
# Custom task model class path
APFLOW_TASK_MODEL_CLASS=
# Allowlist of fields to keep from the link node itself
# Default: id,parent_id,user_id,task_tree_id,origin_type,created_at,updated_at
APFLOW_TASK_LINK_KEEP_FIELDS=
# =============================================================================
# LLM API KEYS
# =============================================================================
# OpenAI API key for LLM tasks
OPENAI_API_KEY=your_openai_api_key_here
# Anthropic API key for LLM tasks
ANTHROPIC_API_KEY=your_anthropic_api_key_here
# =============================================================================
# ALTERNATIVE/LEGACY SETTINGS
# =============================================================================
# Alternative API host setting
# API_HOST=0.0.0.0
# Alternative API port setting
# API_PORT=8000
# Alternative database URL
# DATABASE_URL=
# Test database URL for running tests
# TEST_DATABASE_URL=duckdb:///.data/apflow.test.duckdb