Compare commits
11 commits
244f45d6c6
...
92cabddc3b
Author | SHA1 | Date | |
---|---|---|---|
92cabddc3b | |||
748b0b2c37 | |||
53673b596a | |||
d097d2bb78 | |||
01a6447c13 | |||
6fee098407 | |||
167a010332 | |||
f157eee07c | |||
dc65836b8b | |||
0764ca9bde | |||
0dc9e032d2 |
18 changed files with 455 additions and 55 deletions
2
.gitignore
vendored
2
.gitignore
vendored
|
@ -9,3 +9,5 @@
|
||||||
/coffee_socialbox/logs/
|
/coffee_socialbox/logs/
|
||||||
/teapot_socialbox/data/
|
/teapot_socialbox/data/
|
||||||
/teapot_socialbox/logs/
|
/teapot_socialbox/logs/
|
||||||
|
/tests/docker/coffee/data/
|
||||||
|
/tests/docker/teapot/data/
|
||||||
|
|
|
@ -107,6 +107,8 @@ COPY docker/supervisord.conf /etc/supervisor/conf.d/supervisord.conf
|
||||||
# Copy docker.conf & zz-docker.conf for PHP-FPM
|
# Copy docker.conf & zz-docker.conf for PHP-FPM
|
||||||
COPY docker/docker.conf /usr/local/etc/php-fpm.d/docker.conf
|
COPY docker/docker.conf /usr/local/etc/php-fpm.d/docker.conf
|
||||||
COPY docker/zz-docker.conf /usr/local/etc/php-fpm.d/zz-docker.conf
|
COPY docker/zz-docker.conf /usr/local/etc/php-fpm.d/zz-docker.conf
|
||||||
|
# Copy the logging server script over
|
||||||
|
COPY docker/logger.py /logger.py
|
||||||
|
|
||||||
# Configure php.ini and enable error and log it to /var/log rather than stdout
|
# Configure php.ini and enable error and log it to /var/log rather than stdout
|
||||||
RUN cp /usr/local/etc/php/php.ini-production /usr/local/etc/php/php.ini && \
|
RUN cp /usr/local/etc/php/php.ini-production /usr/local/etc/php/php.ini && \
|
||||||
|
|
|
@ -1,3 +1,52 @@
|
||||||
<?php

    /** @noinspection PhpDefineCanBeReplacedWithConstInspection */

    use Socialbox\Classes\ServerResolver;

    require 'ncc';
    import('net.nosial.socialbox');

    // Test-environment bootstrap.  Guarded by SB_TEST so that including this
    // file more than once does not attempt to redefine the constants below.
    if(!defined('SB_TEST'))
    {
        // Locations of the dockerised test fixtures and the shared test helper.
        $dockerPath = __DIR__ . DIRECTORY_SEPARATOR . 'tests' . DIRECTORY_SEPARATOR . 'docker' . DIRECTORY_SEPARATOR;
        $helperPath = __DIR__ . DIRECTORY_SEPARATOR . 'tests' . DIRECTORY_SEPARATOR . 'Helper.php';

        if(!file_exists($dockerPath))
        {
            throw new RuntimeException('Docker test path not found: ' . $dockerPath);
        }

        if(!file_exists($helperPath))
        {
            throw new RuntimeException('Helper class not found: ' . $helperPath);
        }

        require $helperPath;

        // Global test switches.
        define('SB_TEST', 1);
        putenv('LOG_LEVEL=debug');

        // coffee.com test server.
        // NOTE(review): '127.0.0.0' is an unusual loopback address — confirm this
        // should not be '127.0.0.1' (the whole 127/8 block loops back on Linux,
        // but that is not portable to every platform).
        define('COFFEE_DOMAIN', 'coffee.com');
        define('COFFEE_RPC_HOST', '127.0.0.0');
        define('COFFEE_RPC_PORT', 8086);
        define('COFFEE_RPC_SSL', false);
        define('COFFEE_PUBLIC_KEY', file_get_contents($dockerPath . 'coffee' . DIRECTORY_SEPARATOR . 'signature.pub'));
        define('COFFEE_PRIVATE_KEY', file_get_contents($dockerPath . 'coffee' . DIRECTORY_SEPARATOR . 'signature.pk'));

        // teapot.com test server.
        define('TEAPOT_DOMAIN', 'teapot.com');
        define('TEAPOT_RPC_HOST', '127.0.0.0');
        define('TEAPOT_RPC_PORT', 8087);
        define('TEAPOT_RPC_SSL', false);
        define('TEAPOT_PUBLIC_KEY', file_get_contents($dockerPath . 'teapot' . DIRECTORY_SEPARATOR . 'signature.pub'));
        define('TEAPOT_PRIVATE_KEY', file_get_contents($dockerPath . 'teapot' . DIRECTORY_SEPARATOR . 'signature.pk'));

        // Mocked DNS records so ServerResolver never performs real lookups
        // during the test run.
        ServerResolver::addMock(COFFEE_DOMAIN, sprintf('v=socialbox;sb-rpc=%s://%s:%d/;sb-key=%s;sb-exp=0', (COFFEE_RPC_SSL ? 'https' : 'http'), COFFEE_RPC_HOST, COFFEE_RPC_PORT, COFFEE_PUBLIC_KEY));
        ServerResolver::addMock(TEAPOT_DOMAIN, sprintf('v=socialbox;sb-rpc=%s://%s:%d/;sb-key=%s;sb-exp=0', (TEAPOT_RPC_SSL ? 'https' : 'http'), TEAPOT_RPC_HOST, TEAPOT_RPC_PORT, TEAPOT_PUBLIC_KEY));
    }
|
|
@ -22,8 +22,8 @@ services:
|
||||||
- shared_network
|
- shared_network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- ./coffee_socialbox/config:/etc/config
|
- ./tests/docker/coffee/config:/etc/config
|
||||||
- ./coffee_socialbox/data:/etc/socialbox
|
- ./tests/docker/coffee/data:/etc/socialbox
|
||||||
environment:
|
environment:
|
||||||
# No need to change these values
|
# No need to change these values
|
||||||
LOG_LEVEL: ${LOG_LEVEL:-debug}
|
LOG_LEVEL: ${LOG_LEVEL:-debug}
|
||||||
|
@ -132,8 +132,8 @@ services:
|
||||||
- shared_network
|
- shared_network
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
volumes:
|
volumes:
|
||||||
- ./teapot_socialbox/config:/etc/config
|
- ./tests/docker/teapot/config:/etc/config
|
||||||
- ./teapot_socialbox/data:/etc/socialbox
|
- ./tests/docker/teapot/data:/etc/socialbox
|
||||||
environment:
|
environment:
|
||||||
# No need to change these values
|
# No need to change these values
|
||||||
LOG_LEVEL: ${LOG_LEVEL:-debug}
|
LOG_LEVEL: ${LOG_LEVEL:-debug}
|
||||||
|
|
345
docker/logger.py
Normal file
345
docker/logger.py
Normal file
|
@ -0,0 +1,345 @@
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import socket
|
||||||
|
import threading
|
||||||
|
from datetime import datetime
|
||||||
|
from queue import Queue, Empty
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Optional, Dict, Any, List
|
||||||
|
from dataclasses import dataclass
|
||||||
|
import colorama
|
||||||
|
from colorama import Fore, Back, Style
|
||||||
|
|
||||||
|
# Initialize colorama for cross-platform color support
|
||||||
|
colorama.init()
|
||||||
|
|
||||||
|
|
||||||
|
class LogLevel(str, Enum):
    """Wire-format log severity codes accepted from client log events."""

    DEBUG = "DBG"
    VERBOSE = "VRB"
    INFO = "INFO"
    WARNING = "WRN"
    ERROR = "ERR"
    CRITICAL = "CRT"

    @classmethod
    def to_python_level(cls, level: str) -> int:
        """Translate a wire-format level code to a stdlib logging level.

        Plain strings match the enum keys because the str mixin makes members
        hash and compare like their values.  Unknown codes map to INFO.
        """
        mapping = {
            cls.DEBUG: logging.DEBUG,
            cls.VERBOSE: logging.DEBUG,
            cls.INFO: logging.INFO,
            cls.WARNING: logging.WARNING,
            cls.ERROR: logging.ERROR,
            cls.CRITICAL: logging.CRITICAL
        }
        return mapping.get(level, logging.INFO)

    @classmethod
    def get_color(cls, level: str) -> str:
        """Return the ANSI color prefix used when rendering this level.

        Unknown codes render in white.
        """
        palette = {
            cls.DEBUG: Fore.CYAN,
            cls.VERBOSE: Fore.BLUE,
            cls.INFO: Fore.GREEN,
            cls.WARNING: Fore.YELLOW,
            cls.ERROR: Fore.RED,
            cls.CRITICAL: Fore.RED + Back.WHITE
        }
        return palette.get(level, Fore.WHITE)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class StackFrame:
    """One frame of a client-supplied stack trace, parsed from a JSON dict."""

    file: Optional[str]
    line: Optional[int]
    function: Optional[str]
    args: Optional[List[Any]]
    class_name: Optional[str]
    call_type: str = 'static'

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> 'StackFrame':
        """Build a frame from a raw trace entry, coercing field types defensively."""
        file_value = data.get('file')
        line_value = data.get('line')
        function_value = data.get('function')
        class_value = data.get('class')
        return cls(
            file=str(file_value) if file_value else None,
            line=int(line_value) if line_value is not None else None,
            function=str(function_value) if function_value else None,
            args=data.get('args'),
            class_name=str(class_value) if class_value else None,
            call_type=str(data.get('callType', 'static'))
        )

    def format(self) -> str:
        """Render the frame as a colored one-line summary."""
        location = f"{self.file or '?'}:{self.line or '?'}"
        # NOTE(review): call_type is concatenated bare between class and
        # function (reads naturally for '::' / '->' style values); with the
        # default 'static' the output looks odd — confirm producers always
        # send a separator-style callType.
        if self.class_name:
            call = f"{self.class_name}{self.call_type}{self.function or ''}"
        else:
            call = self.function or ''

        args_str = ""
        if self.args:
            args_str = f"({', '.join(str(item) for item in self.args)})"

        return f"{Fore.BLUE}{call}{Style.RESET_ALL}{args_str} in {Fore.CYAN}{location}{Style.RESET_ALL}"
|
||||||
|
|
||||||
|
|
||||||
|
class ExceptionDetails:
    """Structured view of an exception payload received from a client.

    Holds the exception name, message, numeric code, source location, parsed
    stack trace, and — recursively — the previous (causing) exception.
    """

    def __init__(self, name: str, message: str, code: Optional[int],
                 file: Optional[str], line: Optional[int],
                 trace: List['StackFrame'], previous: Optional['ExceptionDetails']):
        self.name = name
        self.message = message
        self.code = code
        self.file = file
        self.line = line
        self.trace = trace
        self.previous = previous

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> Optional['ExceptionDetails']:
        """Parse an exception dict; returns None for an empty/missing payload."""
        if not data:
            return None

        trace = []
        if 'trace' in data and isinstance(data['trace'], list):
            # Skip malformed (non-dict) trace entries instead of crashing.
            trace = [StackFrame.from_dict(frame) for frame in data['trace']
                     if isinstance(frame, dict)]

        previous = None
        if 'previous' in data and isinstance(data['previous'], dict):
            previous = cls.from_dict(data['previous'])

        return cls(
            name=str(data.get('name', '')),
            message=str(data.get('message', '')),
            code=int(data['code']) if data.get('code') is not None else None,
            file=str(data.get('file')) if data.get('file') else None,
            line=int(data['line']) if data.get('line') is not None else None,
            trace=trace,
            previous=previous
        )

    def format(self, level: int = 0) -> str:
        """Render the exception and its cause chain as colored, indented text.

        :param level: nesting depth; each chained cause is indented one level.
        """
        indent = " " * level
        parts = []

        # Exception header: name, optional numeric code, message.
        header = f"{indent}{Fore.RED}{self.name}"
        # BUG FIX: the original condition was `self.code is not None and 0`,
        # which is always false, so the exception code was never displayed.
        # Show the code whenever it is present and meaningful (non-zero).
        if self.code is not None and self.code != 0:
            header += f" {Fore.YELLOW}:{self.code}{Style.RESET_ALL}"

        # Message
        header += f"{Fore.WHITE}:{Style.RESET_ALL} {self.message}"

        # Source location, when both file and line are known.
        if self.file and self.line:
            header += f"{Fore.WHITE} at {Style.RESET_ALL}{self.file}:{self.line}"

        parts.append(header)

        # Stack trace
        if self.trace:
            parts.append(f"{indent}{Fore.WHITE}Stack trace:{Style.RESET_ALL}")
            for frame in self.trace:
                parts.append(f"{indent} → {frame.format()}")

        # Chained (previous) exception, rendered one level deeper.
        if self.previous:
            parts.append(f"{indent}{Fore.YELLOW}Caused by:{Style.RESET_ALL}")
            parts.append(self.previous.format(level + 1))

        return "\n".join(parts)
|
||||||
|
|
||||||
|
|
||||||
|
class ColoredLogger(logging.Logger):
    """Logger subclass that installs a console handler with a colored,
    timestamped line format on construction."""

    def __init__(self, name: str):
        super().__init__(name)
        # Timestamp + colored level tag + message.
        self.formatter = logging.Formatter(
            f'%(asctime)s {Fore.WHITE}[%(levelname)s]{Style.RESET_ALL} %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )

        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(self.formatter)
        self.addHandler(stream_handler)
|
||||||
|
|
||||||
|
|
||||||
|
class MultiProtocolServer:
    """Log-collection server listening for JSON log events on one port over
    both TCP and UDP.

    Events are echoed to the console via a colored logger and appended, by a
    background writer thread, to daily-rotated JSON Lines files under
    ``working_directory``.
    """

    def __init__(self, host: str, port: int, working_directory: str):
        self.host = host
        self.port = port
        self.working_directory = working_directory
        self.log_queue: Queue = Queue()  # entries awaiting the file-writer thread
        self.current_date = datetime.now().strftime('%Y-%m-%d')
        self.log_file = None             # opened lazily by _get_log_file()
        self.stop_event = threading.Event()

        os.makedirs(self.working_directory, exist_ok=True)

        # Install the colored logger class before the first getLogger() call.
        logging.setLoggerClass(ColoredLogger)
        self.logger = logging.getLogger("MultiProtocolServer")
        self.logger.setLevel(logging.DEBUG)

    def _handle_log_event(self, data: Dict[str, Any], address: tuple) -> None:
        """Process and format a structured log event with colors and proper formatting."""
        try:
            app_name = data.get('application_name', 'Unknown')

            # Normalise the timestamp to 'YYYY-mm-dd HH:MM:SS'; fall back to
            # the current time when missing or not a valid unix timestamp.
            timestamp = data.get('timestamp')
            if timestamp:
                try:
                    timestamp = datetime.fromtimestamp(int(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
                except (ValueError, TypeError):
                    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            else:
                timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

            level = data.get('level', 'INFO')
            message = data.get('message', '')

            # Console line: colored "[app]" tag followed by the message.
            color = LogLevel.get_color(level)
            log_message = f"{color}[{app_name}]{Style.RESET_ALL} {message}"

            # Append a rendered exception block when one is attached.
            exception_data = data.get('exception')
            if exception_data:
                exception = ExceptionDetails.from_dict(exception_data)
                if exception:
                    log_message += f"\n{exception.format()}"

            self.logger.log(LogLevel.to_python_level(level), log_message)

            # Queue the raw event for persistence by the writer thread.
            self.log_queue.put({
                "timestamp": timestamp,
                "address": address,
                "data": data
            })

        except Exception as e:
            self.logger.error(f"Error processing log event: {e}", exc_info=True)

    def _handle_data(self, data: bytes, address: tuple) -> None:
        """Process incoming data and attempt to parse as JSON."""
        try:
            text = data.decode('utf-8').strip()

            try:
                # Structured event path.
                self._handle_log_event(json.loads(text), address)
            except json.JSONDecodeError:
                # Not JSON: report and persist the raw payload unchanged.
                self.logger.info(f"Received non-JSON data from {address}: {text}")
                self.log_queue.put({
                    "timestamp": datetime.now().isoformat(),
                    "address": address,
                    "data": text
                })

        except Exception as e:
            self.logger.error(f"Data handling error: {e}")

    def _get_log_file(self):
        """Return the open file for today's log, rotating when the date rolls over."""
        today = datetime.now().strftime('%Y-%m-%d')
        if today != self.current_date or self.log_file is None:
            if self.log_file:
                self.log_file.close()
            self.current_date = today
            # NOTE(review): filename is e.g. "log2024-01-01.jsonl" — no
            # separator between "log" and the date; confirm intentional.
            self.log_file = open(os.path.join(self.working_directory, f"log{today}.jsonl"), 'a')
        return self.log_file

    def _log_writer(self):
        """Writer thread: drain the queue into the current JSONL file.

        Keeps draining after stop() is signalled until the queue is empty.
        """
        while not self.stop_event.is_set() or not self.log_queue.empty():
            try:
                entry = self.log_queue.get(timeout=1)  # timeout lets us re-check stop_event
                log_file = self._get_log_file()
                json.dump(entry, log_file)
                log_file.write('\n')
                log_file.flush()
            except Empty:
                continue
            except Exception as e:
                self.logger.error(f"Error writing to log file: {e}")

    def _handle_tcp_client(self, client_socket, address):
        """Per-connection thread: read until the peer closes, feeding _handle_data."""
        self.logger.info(f"TCP connection established from {address}")
        try:
            with client_socket:
                while True:
                    chunk = client_socket.recv(4096)
                    if not chunk:
                        break
                    self._handle_data(chunk, address)
        except Exception as e:
            self.logger.error(f"TCP client error: {e}")
        self.logger.info(f"TCP connection closed from {address}")

    def _handle_udp_client(self, data, address):
        """Handle a single UDP datagram."""
        try:
            self._handle_data(data, address)
        except Exception as e:
            self.logger.error(f"UDP client error: {e}")

    def _start_tcp_server(self):
        """Accept loop for TCP; spawns one daemon handler thread per connection."""
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as tcp_socket:
            tcp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            tcp_socket.bind((self.host, self.port))
            tcp_socket.listen(5)
            self.logger.debug(f"TCP server running on {self.host}:{self.port}")

            while not self.stop_event.is_set():
                try:
                    client_socket, address = tcp_socket.accept()
                    threading.Thread(target=self._handle_tcp_client,
                                     args=(client_socket, address),
                                     daemon=True).start()
                except Exception as e:
                    self.logger.error(f"TCP server error: {e}")

    def _start_udp_server(self):
        """Receive loop for UDP datagrams on the same port as TCP."""
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as udp_socket:
            udp_socket.bind((self.host, self.port))
            self.logger.debug(f"UDP server running on {self.host}:{self.port}")

            while not self.stop_event.is_set():
                try:
                    datagram, address = udp_socket.recvfrom(4096)
                    self._handle_udp_client(datagram, address)
                except Exception as e:
                    self.logger.error(f"UDP server error: {e}")

    def start(self):
        """Start the writer, TCP and UDP threads, then block until interrupted."""
        self.logger.info("Starting MultiProtocolServer...")
        threading.Thread(target=self._log_writer, daemon=True).start()
        tcp_thread = threading.Thread(target=self._start_tcp_server, daemon=True)
        udp_thread = threading.Thread(target=self._start_udp_server, daemon=True)
        tcp_thread.start()
        udp_thread.start()

        try:
            tcp_thread.join()
            udp_thread.join()
        except KeyboardInterrupt:
            self.stop()

    def stop(self):
        """Signal all loops to stop and close the current log file."""
        self.logger.info("Stopping Logging Server...")
        self.stop_event.set()
        if self.log_file:
            self.log_file.close()
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # CLI entry point: bind on all interfaces and serve until interrupted.
    arg_parser = argparse.ArgumentParser(description="Logging Server")
    arg_parser.add_argument("-p", "--port", type=int, default=8080,
                            help="Port to listen on")
    arg_parser.add_argument("-w", "--working-directory", type=str,
                            default="./logs", help="Directory to store log files")
    options = arg_parser.parse_args()

    server = MultiProtocolServer("0.0.0.0", options.port, options.working_directory)
    server.start()
|
|
@ -10,6 +10,20 @@ nodaemon=true
|
||||||
minfds=1024
|
minfds=1024
|
||||||
minprocs=200
|
minprocs=200
|
||||||
|
|
||||||
|
; Centralised logging server; priority=1 so it starts before the programs
; that send their logs to it.
[program:logger]
; BUG FIX: the original command was "python3 -m /logger.py --port 5131".
; "-m" expects a module name, not a file path, so python3 exits immediately
; with "No module named /logger" and supervisor crash-loops the program.
; Run the script directly instead.
command=python3 /logger.py --port 5131
autostart=true
autorestart=true
priority=1
stdout_events_enabled=true
stderr_events_enabled=true
stdout_logfile=/var/log/socialbox.log
stderr_logfile=/var/log/socialbox_error.log
stdout_logfile_maxbytes=20MB
stdout_logfile_backups=5
stderr_logfile_maxbytes=20MB
stderr_logfile_backups=5
|
||||||
|
|
||||||
[program:php-fpm]
|
[program:php-fpm]
|
||||||
command=/usr/local/sbin/php-fpm --nodaemonize
|
command=/usr/local/sbin/php-fpm --nodaemonize
|
||||||
autostart=true
|
autostart=true
|
||||||
|
@ -22,11 +36,6 @@ stdout_logfile_backups=5
|
||||||
stderr_logfile_maxbytes=0
|
stderr_logfile_maxbytes=0
|
||||||
stderr_logfile_backups=5
|
stderr_logfile_backups=5
|
||||||
|
|
||||||
[program:php-fpm-log]
|
|
||||||
command=tail -f /var/log/fpm.log /var/log/fpm_error.log
|
|
||||||
stdout_events_enabled=true
|
|
||||||
stderr_events_enabled=true
|
|
||||||
|
|
||||||
[program:nginx]
|
[program:nginx]
|
||||||
command=/usr/sbin/nginx -g "daemon off;" -c /etc/nginx/nginx.conf
|
command=/usr/sbin/nginx -g "daemon off;" -c /etc/nginx/nginx.conf
|
||||||
autostart=true
|
autostart=true
|
||||||
|
|
|
@ -6,6 +6,5 @@
|
||||||
</testsuites>
|
</testsuites>
|
||||||
<php>
|
<php>
|
||||||
<ini name="error_reporting" value="-1"/>
|
<ini name="error_reporting" value="-1"/>
|
||||||
<server name="KERNEL_DIR" value="app/"/>
|
|
||||||
</php>
|
</php>
|
||||||
</phpunit>
|
</phpunit>
|
||||||
|
|
26
tests/Helper.php
Normal file
26
tests/Helper.php
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
<?php
|
||||||
|
|
||||||
|
class Helper
{
    /**
     * Generates a random username based on the given domain.
     *
     * @param string $domain The domain to be appended to the generated username.
     * @param int $length The length of the random string.
     * @param string $prefix The prefix to be appended to the generated username.
     * @return string Returns a randomly generated username in the format 'user<randomString>@<domain>'.
     */
    public static function generateRandomPeer(string $domain, int $length=16, string $prefix='userTest'): string
    {
        $alphabet = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
        $maxIndex = strlen($alphabet) - 1;
        $randomString = '';

        for ($i = 0; $i < $length; $i++)
        {
            // rand() is fine here: these are test identifiers, not secrets.
            $randomString .= $alphabet[rand(0, $maxIndex)];
        }

        return sprintf('%s%s@%s', $prefix, $randomString, $domain);
    }
}
|
|
@ -101,7 +101,6 @@
|
||||||
$flags = [];
|
$flags = [];
|
||||||
|
|
||||||
$result = SessionFlags::isComplete($flags);
|
$result = SessionFlags::isComplete($flags);
|
||||||
|
|
||||||
$this->assertTrue($result);
|
$this->assertTrue($result);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
9
tests/Socialbox/SocialClientSessionTest.php
Normal file
9
tests/Socialbox/SocialClientSessionTest.php
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
<?php
|
||||||
|
|
||||||
|
namespace Socialbox;
|
||||||
|
|
||||||
|
use PHPUnit\Framework\TestCase;
|
||||||
|
|
||||||
|
class SocialClientSessionTest extends TestCase
|
||||||
|
{
|
||||||
|
}
|
1
tests/docker/coffee/signature.pk
Normal file
1
tests/docker/coffee/signature.pk
Normal file
|
@ -0,0 +1 @@
|
||||||
|
sig:tTVe59Ko5XuwgS8PneR92FAOqbgSHTKYn8U-lQRB9KODn0J_yPXCZCZGDUyS95hul2Jn7X7-EVT15FEmZADCZw
|
1
tests/docker/coffee/signature.pub
Normal file
1
tests/docker/coffee/signature.pub
Normal file
|
@ -0,0 +1 @@
|
||||||
|
sig:g59Cf8j1wmQmRg1MkveYbpdiZ-1-_hFU9eRRJmQAwmc
|
1
tests/docker/teapot/signature.pk
Normal file
1
tests/docker/teapot/signature.pk
Normal file
|
@ -0,0 +1 @@
|
||||||
|
sig:kPfGxpsnisJIp5pKuD1AI7-T1bLk1S-EGOr7jBq5AO4wNdS6uKkCj8gC_4RlMSgWGkh2GxfF8ws26dKdDPFiJg
|
1
tests/docker/teapot/signature.pub
Normal file
1
tests/docker/teapot/signature.pub
Normal file
|
@ -0,0 +1 @@
|
||||||
|
sig:MDXUuripAo_IAv-EZTEoFhpIdhsXxfMLNunSnQzxYiY
|
|
@ -1,11 +0,0 @@
|
||||||
< {%
|
|
||||||
import {randomCrc32String} from "./utilities.js";
|
|
||||||
request.variables.set("id", randomCrc32String());
|
|
||||||
%}
|
|
||||||
POST http://172.27.7.211/
|
|
||||||
Content-Type: application/json
|
|
||||||
|
|
||||||
{
|
|
||||||
"method": "ping",
|
|
||||||
"id": "{{id}}"
|
|
||||||
}
|
|
|
@ -1,33 +0,0 @@
|
||||||
// CRC-32 (reflected, polynomial 0xEDB88320) over a string's UTF-16 code units.
export function crc32(str) {
    // The 256-entry lookup table is rebuilt on every call, as in the
    // original; cheap enough for occasional use.
    const table = [];
    for (let i = 0; i < 256; i++) {
        let entry = i;
        for (let bit = 0; bit < 8; bit++) {
            entry = (entry & 1) ? ((entry >>> 1) ^ 0xEDB88320) : (entry >>> 1);
        }
        table[i] = entry;
    }

    let crc = 0xFFFFFFFF;
    for (let i = 0; i < str.length; i++) {
        crc = (crc >>> 8) ^ table[(crc ^ str.charCodeAt(i)) & 0xFF];
    }

    // Final XOR, forced to an unsigned 32-bit value.
    return (crc ^ 0xFFFFFFFF) >>> 0;
}

// Returns the CRC-32 of a random alphanumeric string, as a hex string
// (no zero-padding, so the result may be fewer than 8 characters).
export function randomCrc32String(length = 8) {
    const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
    let seed = '';
    for (let i = 0; i < length; i++) {
        seed += alphabet.charAt(Math.floor(Math.random() * alphabet.length));
    }
    return crc32(seed).toString(16); // Convert to hexadecimal string
}

console.log(randomCrc32String()); // Example usage
|
|
Loading…
Add table
Reference in a new issue