init
This commit is contained in:
parent
18dbc71916
commit
2346a426a2
16
.env.example
16
.env.example
@ -1,16 +0,0 @@
|
||||
# API Settings
|
||||
APP_VERSION=1.0.0
|
||||
ENV=dev
|
||||
|
||||
# Server Settings
|
||||
UVICORN_HOST=0.0.0.0
|
||||
UVICORN_PORT=8888
|
||||
|
||||
# CORS Settings
|
||||
BACKEND_CORS_ORIGINS=http://localhost:3000,http://localhost:8080,http://localhost:5173
|
||||
|
||||
# Application Settings
|
||||
PROJECT_NAME=freeleaps-authentication
|
||||
|
||||
# Logging
|
||||
LOGGING_LEVEL=INFO
|
||||
@ -1,32 +0,0 @@
|
||||
# Use Python 3.10 slim image as base
|
||||
FROM python:3.10-slim
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Set environment variables
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
POETRY_VERSION=1.7.1
|
||||
|
||||
# Install system dependencies
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
gcc \
|
||||
python3-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy requirements file
|
||||
COPY requirements.txt .
|
||||
|
||||
# Install Python dependencies
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Copy application code
|
||||
COPY . .
|
||||
|
||||
# Expose the port the app runs on
|
||||
EXPOSE 8888
|
||||
|
||||
# Command to run the application
|
||||
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8888"]
|
||||
@ -1,6 +1,6 @@
|
||||
apiVersion: v2
|
||||
name: freeleaps-authentication
|
||||
description: A Helm chart for FastAPI and Vue.js application
|
||||
name: authentication
|
||||
description: A Helm Chart of authentication, which part of Freeleaps Platform, powered by Freeleaps.
|
||||
type: application
|
||||
version: 0.1.0
|
||||
appVersion: "1.0.0"
|
||||
version: 0.0.1
|
||||
appVersion: "0.0.1"
|
||||
|
||||
@ -1,62 +0,0 @@
|
||||
{{/*
|
||||
Expand the name of the chart.
|
||||
*/}}
|
||||
{{- define "app.name" -}}
|
||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create a default fully qualified app name.
|
||||
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
|
||||
If release name contains chart name it will be used as a full name.
|
||||
*/}}
|
||||
{{- define "app.fullname" -}}
|
||||
{{- if .Values.fullnameOverride }}
|
||||
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- $name := default .Chart.Name .Values.nameOverride }}
|
||||
{{- if contains $name .Release.Name }}
|
||||
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
|
||||
{{- else }}
|
||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create chart name and version as used by the chart label.
|
||||
*/}}
|
||||
{{- define "app.chart" -}}
|
||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Common labels
|
||||
*/}}
|
||||
{{- define "app.labels" -}}
|
||||
helm.sh/chart: {{ include "app.chart" . }}
|
||||
{{ include "app.selectorLabels" . }}
|
||||
{{- if .Chart.AppVersion }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
{{- end }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Selector labels
|
||||
*/}}
|
||||
{{- define "app.selectorLabels" -}}
|
||||
app.kubernetes.io/name: {{ include "app.name" . }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
{{- end }}
|
||||
|
||||
{{/*
|
||||
Create the name of the service account to use
|
||||
*/}}
|
||||
{{- define "app.serviceAccountName" -}}
|
||||
{{- if .Values.serviceAccount.create }}
|
||||
{{- default (include "app.fullname" .) .Values.serviceAccount.name }}
|
||||
{{- else }}
|
||||
{{- default "default" .Values.serviceAccount.name }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
@ -1,58 +0,0 @@
|
||||
{{- if .Values.freeleapsAuthenticationApiServer.enabled -}}
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-api-server
|
||||
labels:
|
||||
{{- include "app.labels" . | nindent 4 }}
|
||||
component: api-server
|
||||
spec:
|
||||
replicas: {{ .Values.freeleapsAuthenticationApiServer.replicas }}
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "app.selectorLabels" . | nindent 6 }}
|
||||
component: api-server
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
{{- include "app.selectorLabels" . | nindent 8 }}
|
||||
component: api-server
|
||||
spec:
|
||||
serviceAccountName: {{ include "app.serviceAccountName" . }}
|
||||
containers:
|
||||
- name: api-server
|
||||
image: "{{ .Values.freeleapsAuthenticationApiServer.image.registry | default .Values.global.registry }}/{{ .Values.freeleapsAuthenticationApiServer.image.repository | default .Values.global.repository }}/{{ .Values.freeleapsAuthenticationApiServer.image.name }}:{{ .Values.freeleapsAuthenticationApiServer.image.tag }}"
|
||||
imagePullPolicy: {{ .Values.freeleapsAuthenticationApiServer.image.imagePullPolicy }}
|
||||
ports:
|
||||
{{- toYaml .Values.freeleapsAuthenticationApiServer.ports | nindent 12 }}
|
||||
resources:
|
||||
{{- toYaml .Values.freeleapsAuthenticationApiServer.resources | nindent 12 }}
|
||||
env:
|
||||
{{- toYaml .Values.freeleapsAuthenticationApiServer.env | nindent 12 }}
|
||||
{{- with .Values.freeleapsAuthenticationApiServer.livenessProbe }}
|
||||
livenessProbe:
|
||||
{{- toYaml . | nindent 12 }}
|
||||
{{- end }}
|
||||
{{- with .Values.freeleapsAuthenticationApiServer.readinessProbe }}
|
||||
readinessProbe:
|
||||
{{- toYaml . | nindent 12 }}
|
||||
{{- end }}
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: freeleaps-authentication-api-server
|
||||
labels:
|
||||
{{- include "app.labels" . | nindent 4 }}
|
||||
component: api-server
|
||||
spec:
|
||||
type: {{ .Values.service.type }}
|
||||
ports:
|
||||
- port: 8888
|
||||
targetPort: http
|
||||
protocol: TCP
|
||||
name: api-server
|
||||
selector:
|
||||
{{- include "app.selectorLabels" . | nindent 4 }}
|
||||
component: api-server
|
||||
{{- end }}
|
||||
@ -0,0 +1,21 @@
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: authentication-config
|
||||
namespace: {{ .Release.Namespace }}
|
||||
type: Opaque
|
||||
data:
|
||||
TZ: {{ .Values.authentication.configs.tz | b64enc | quote }}
|
||||
APP_NAME: {{ .Values.authentication.configs.appName | b64enc | quote }}
|
||||
APP_ENV: {{ .Values.authentication.configs.appEnv | b64enc | quote }}
|
||||
DEVSVC_WEBAPI_URL_BASE: {{ .Values.authentication.configs.devsvcWebapiUrlBase | b64enc | quote }}
|
||||
NOTIFICATION_WEBAPI_URL_BASE: {{ .Values.authentication.configs.notificationWebapiUrlBase | b64enc | quote }}
|
||||
AUTH_SERVICE_ENDPOINT: {{ .Values.authentication.configs.authServiceEndpoint | b64enc | quote }}
|
||||
JWT_ALGORITHM: {{ .Values.authentication.configs.jwtAlgorithm | b64enc | quote }}
|
||||
SERVICE_API_ACCESS_HOST: {{ .Values.authentication.configs.serviceApiAccessHost | b64enc | quote }}
|
||||
SERVICE_API_ACCESS_PORT: {{ .Values.authentication.configs.serviceApiAccessPort | toString | b64enc }}
|
||||
MONGODB_NAME: {{ .Values.authentication.configs.mongodbName | b64enc | quote }}
|
||||
MONGODB_PORT: {{ .Values.authentication.configs.mongodbPort | toString | b64enc }}
|
||||
METRICS_ENABLED: {{ .Values.authentication.configs.metricsEnabled | default false | toString | b64enc }}
|
||||
PROBES_ENABLED: {{ .Values.authentication.configs.probesEnabled | default false | toString | b64enc }}
|
||||
|
||||
@ -0,0 +1,27 @@
|
||||
{{ $namespace := .Release.Namespace }}
|
||||
{{ $appVersion := .Chart.AppVersion | quote }}
|
||||
{{ $releaseCertificate := .Release.Service }}
|
||||
{{ $releaseName := .Release.Name }}
|
||||
{{- range $ingress := .Values.authentication.ingresses }}
|
||||
{{- if not $ingress.tls.exists }}
|
||||
---
|
||||
apiVersion: cert-manager.io/v1
|
||||
kind: Certificate
|
||||
metadata:
|
||||
name: {{ $ingress.name }}
|
||||
namespace: {{ $namespace }}
|
||||
labels:
|
||||
app.kubernetes.io/version: {{ $appVersion }}
|
||||
app.kubernetes.io/name: {{ $ingress.name | quote }}
|
||||
app.kubernetes.io/managed-by: {{ $releaseCertificate }}
|
||||
app.kubernetes.io/instance: {{ $releaseName }}
|
||||
spec:
|
||||
commonName: {{ $ingress.host }}
|
||||
dnsNames:
|
||||
- {{ $ingress.host }}
|
||||
issuerRef:
|
||||
name: {{ $ingress.tls.issuerRef.name }}
|
||||
kind: {{ $ingress.tls.issuerRef.kind }}
|
||||
secretName: {{ $ingress.tls.name }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
@ -0,0 +1,865 @@
|
||||
{{- if .Values.dashboard.enabled }}
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: {{ .Values.dashboard.name }}
|
||||
namespace: {{ .Values.dashboard.namespace }}
|
||||
labels:
|
||||
grafana_dashboard: "1"
|
||||
data:
|
||||
{{ .Values.dashboard.name }}.json: |
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": {
|
||||
"type": "datasource",
|
||||
"uid": "grafana"
|
||||
},
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
}
|
||||
]
|
||||
},
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
"id": 36,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "prometheus"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"custom": {
|
||||
"axisBorderShow": false,
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"barWidthFactor": 0.6,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 0,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": {
|
||||
"legend": false,
|
||||
"tooltip": false,
|
||||
"viz": false
|
||||
},
|
||||
"insertNulls": false,
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "auto",
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
},
|
||||
"thresholdsStyle": {
|
||||
"mode": "off"
|
||||
}
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 9,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"id": 2,
|
||||
"options": {
|
||||
"legend": {
|
||||
"calcs": [
|
||||
"mean",
|
||||
"lastNotNull",
|
||||
"max",
|
||||
"min"
|
||||
],
|
||||
"displayMode": "table",
|
||||
"placement": "right",
|
||||
"showLegend": true
|
||||
},
|
||||
"tooltip": {
|
||||
"hideZeros": false,
|
||||
"mode": "single",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"pluginVersion": "11.5.2",
|
||||
"targets": [
|
||||
{
|
||||
"$$hashKey": "object:214",
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "e4584a9f-5364-4b3d-a851-7abbc5250820"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"expr": "increase({{ .Values.dashboard.metricsPrefix }}_http_requests_total[1m])",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{ `{{ method }} {{ handler }}` }}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Total requests per minute",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "prometheus"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"custom": {
|
||||
"axisBorderShow": false,
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"barWidthFactor": 0.6,
|
||||
"drawStyle": "bars",
|
||||
"fillOpacity": 100,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": {
|
||||
"legend": false,
|
||||
"tooltip": false,
|
||||
"viz": false
|
||||
},
|
||||
"insertNulls": false,
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "never",
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "normal"
|
||||
},
|
||||
"thresholdsStyle": {
|
||||
"mode": "off"
|
||||
}
|
||||
},
|
||||
"mappings": [],
|
||||
"min": 0,
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "4xx"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "color",
|
||||
"value": {
|
||||
"fixedColor": "red",
|
||||
"mode": "fixed"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "HTTP 500"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "color",
|
||||
"value": {
|
||||
"fixedColor": "#bf1b00",
|
||||
"mode": "fixed"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 10,
|
||||
"x": 9,
|
||||
"y": 0
|
||||
},
|
||||
"id": 13,
|
||||
"options": {
|
||||
"legend": {
|
||||
"calcs": [
|
||||
"mean",
|
||||
"max"
|
||||
],
|
||||
"displayMode": "list",
|
||||
"placement": "bottom",
|
||||
"showLegend": true
|
||||
},
|
||||
"tooltip": {
|
||||
"hideZeros": false,
|
||||
"mode": "multi",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"pluginVersion": "11.5.2",
|
||||
"targets": [
|
||||
{
|
||||
"$$hashKey": "object:140",
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "e4584a9f-5364-4b3d-a851-7abbc5250820"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"expr": "sum by (status) (rate({{ .Values.dashboard.metricsPrefix }}_http_requests_total[1m]))",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{ `{{ status }}` }}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Request per minute",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "prometheus"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"custom": {
|
||||
"axisBorderShow": false,
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"barWidthFactor": 0.6,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": {
|
||||
"legend": false,
|
||||
"tooltip": false,
|
||||
"viz": false
|
||||
},
|
||||
"insertNulls": false,
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "never",
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
},
|
||||
"thresholdsStyle": {
|
||||
"mode": "off"
|
||||
}
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "errors"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "color",
|
||||
"value": {
|
||||
"fixedColor": "#c15c17",
|
||||
"mode": "fixed"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 5,
|
||||
"x": 19,
|
||||
"y": 0
|
||||
},
|
||||
"id": 4,
|
||||
"options": {
|
||||
"legend": {
|
||||
"calcs": [
|
||||
"mean",
|
||||
"lastNotNull",
|
||||
"max"
|
||||
],
|
||||
"displayMode": "list",
|
||||
"placement": "bottom",
|
||||
"showLegend": true
|
||||
},
|
||||
"tooltip": {
|
||||
"hideZeros": false,
|
||||
"mode": "multi",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"pluginVersion": "11.5.2",
|
||||
"targets": [
|
||||
{
|
||||
"$$hashKey": "object:766",
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "e4584a9f-5364-4b3d-a851-7abbc5250820"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"expr": "sum(rate({{ .Values.dashboard.metricsPrefix }}_http_requests_total{status=\"5xx\"}[1m]))",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "errors",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Errors per second",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "prometheus"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"custom": {
|
||||
"axisBorderShow": false,
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"barWidthFactor": 0.6,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 0,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": {
|
||||
"legend": false,
|
||||
"tooltip": false,
|
||||
"viz": false
|
||||
},
|
||||
"insertNulls": false,
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "auto",
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
},
|
||||
"thresholdsStyle": {
|
||||
"mode": "off"
|
||||
}
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "s"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 9,
|
||||
"x": 0,
|
||||
"y": 7
|
||||
},
|
||||
"id": 6,
|
||||
"options": {
|
||||
"legend": {
|
||||
"calcs": [
|
||||
"mean",
|
||||
"lastNotNull",
|
||||
"max",
|
||||
"min"
|
||||
],
|
||||
"displayMode": "table",
|
||||
"placement": "right",
|
||||
"showLegend": true
|
||||
},
|
||||
"tooltip": {
|
||||
"hideZeros": false,
|
||||
"mode": "single",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"pluginVersion": "11.5.2",
|
||||
"targets": [
|
||||
{
|
||||
"$$hashKey": "object:146",
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "e4584a9f-5364-4b3d-a851-7abbc5250820"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"expr": "{{ .Values.dashboard.metricsPrefix }}_http_request_duration_seconds_sum{handler!=\"none\"} / {{ .Values.dashboard.metricsPrefix }}_http_request_duration_seconds_count",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{ `{{ handler }}` }}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Average response time",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "prometheus"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"custom": {
|
||||
"axisBorderShow": false,
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"barWidthFactor": 0.6,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": {
|
||||
"legend": false,
|
||||
"tooltip": false,
|
||||
"viz": false
|
||||
},
|
||||
"insertNulls": false,
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "never",
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
},
|
||||
"thresholdsStyle": {
|
||||
"mode": "off"
|
||||
}
|
||||
},
|
||||
"mappings": [],
|
||||
"max": 1,
|
||||
"min": 0,
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "percentunit"
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "none"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "color",
|
||||
"value": {
|
||||
"fixedColor": "red",
|
||||
"mode": "fixed"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 10,
|
||||
"x": 9,
|
||||
"y": 7
|
||||
},
|
||||
"id": 11,
|
||||
"options": {
|
||||
"legend": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"displayMode": "table",
|
||||
"placement": "right",
|
||||
"showLegend": true
|
||||
},
|
||||
"tooltip": {
|
||||
"hideZeros": false,
|
||||
"mode": "multi",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"pluginVersion": "11.5.2",
|
||||
"targets": [
|
||||
{
|
||||
"$$hashKey": "object:1079",
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "e4584a9f-5364-4b3d-a851-7abbc5250820"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"expr": "increase({{ .Values.dashboard.metricsPrefix }}_http_request_duration_seconds_bucket{le=\"0.1\"}[1m]) \n/ ignoring (le) increase({{ .Values.dashboard.metricsPrefix }}_http_request_duration_seconds_count[1m])",
|
||||
"format": "time_series",
|
||||
"instant": false,
|
||||
"interval": "",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{ `{{ handler }}` }}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Requests under 100ms",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "prometheus"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"custom": {
|
||||
"axisBorderShow": false,
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"barWidthFactor": 0.6,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": {
|
||||
"legend": false,
|
||||
"tooltip": false,
|
||||
"viz": false
|
||||
},
|
||||
"insertNulls": false,
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "never",
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
},
|
||||
"thresholdsStyle": {
|
||||
"mode": "line+area"
|
||||
}
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "transparent",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 0
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "s"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 9,
|
||||
"x": 0,
|
||||
"y": 15
|
||||
},
|
||||
"id": 16,
|
||||
"options": {
|
||||
"legend": {
|
||||
"calcs": [
|
||||
"mean",
|
||||
"lastNotNull",
|
||||
"max",
|
||||
"min"
|
||||
],
|
||||
"displayMode": "table",
|
||||
"placement": "right",
|
||||
"showLegend": true
|
||||
},
|
||||
"tooltip": {
|
||||
"hideZeros": false,
|
||||
"mode": "multi",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"pluginVersion": "11.5.2",
|
||||
"targets": [
|
||||
{
|
||||
"$$hashKey": "object:426",
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "e4584a9f-5364-4b3d-a851-7abbc5250820"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.9, rate({{ .Values.dashboard.metricsPrefix }}_http_request_duration_seconds_bucket{handler!=\"none\"}[1m]))",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{ `{{ handler }}` }}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Request duration [s] - p90",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "prometheus"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"custom": {
|
||||
"axisBorderShow": false,
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"barWidthFactor": 0.6,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 25,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": {
|
||||
"legend": false,
|
||||
"tooltip": false,
|
||||
"viz": false
|
||||
},
|
||||
"insertNulls": false,
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "auto",
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "normal"
|
||||
},
|
||||
"thresholdsStyle": {
|
||||
"mode": "off"
|
||||
}
|
||||
},
|
||||
"mappings": [],
|
||||
"min": 0,
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "s"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 10,
|
||||
"x": 9,
|
||||
"y": 15
|
||||
},
|
||||
"id": 15,
|
||||
"options": {
|
||||
"legend": {
|
||||
"calcs": [
|
||||
"mean",
|
||||
"lastNotNull",
|
||||
"max",
|
||||
"min"
|
||||
],
|
||||
"displayMode": "table",
|
||||
"placement": "right",
|
||||
"showLegend": true
|
||||
},
|
||||
"tooltip": {
|
||||
"hideZeros": false,
|
||||
"mode": "single",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"pluginVersion": "11.5.2",
|
||||
"targets": [
|
||||
{
|
||||
"$$hashKey": "object:426",
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "e4584a9f-5364-4b3d-a851-7abbc5250820"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"expr": "histogram_quantile(0.5, rate({{ .Values.dashboard.metricsPrefix }}_http_request_duration_seconds_bucket{handler!=\"none\"}[1m]))",
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{ `{{ handler }}` }}",
|
||||
"range": true,
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Request duration [s] - p50",
|
||||
"type": "timeseries"
|
||||
}
|
||||
],
|
||||
"preload": false,
|
||||
"refresh": "5s",
|
||||
"schemaVersion": 40,
|
||||
"tags": ["freeleaps"],
|
||||
"templating": {
|
||||
"list": []
|
||||
},
|
||||
"time": {
|
||||
"from": "now-5m",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"refresh_intervals": []
|
||||
},
|
||||
"timezone": "",
|
||||
"title": "{{ .Values.dashboard.title }}",
|
||||
"uid": "",
|
||||
"version": 11,
|
||||
"weekStart": ""
|
||||
}
|
||||
{{- end }}
|
||||
@ -0,0 +1,131 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
app.kubernetes.io/name: "authentication"
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
{{- if .Values.logIngest.enabled }}
|
||||
annotations:
|
||||
opentelemetry.io/config-checksum: {{ include (print $.Template.BasePath "/opentelemetry.yaml") . | sha256sum }}
|
||||
{{- end }}
|
||||
name: "authentication"
|
||||
namespace: {{ .Release.Namespace | quote }}
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: "authentication"
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
replicas: {{ .Values.authentication.replicas }}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
app.kubernetes.io/name: "authentication"
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
annotations:
|
||||
app.kubernetes.io/config-checksum: {{ include (print $.Template.BasePath "/authentication-config.yaml") . | sha256sum }}
|
||||
{{- if .Values.logIngest.enabled }}
|
||||
opentelemetry.io/config-checksum: {{ include (print $.Template.BasePath "/opentelemetry.yaml") . | sha256sum }}
|
||||
sidecar.opentelemetry.io/inject: "{{ .Release.Namespace}}/{{ .Release.Name }}-opentelemetry-collector"
|
||||
{{- end }}
|
||||
spec:
|
||||
{{- if .Values.logIngest.enabled }}
|
||||
serviceAccountName: "{{ .Release.Name }}-otel-collector"
|
||||
{{- end }}
|
||||
containers:
|
||||
- name: "authentication"
|
||||
image: "{{ coalesce .Values.authentication.image.registry .Values.global.registry "docker.io"}}/{{ coalesce .Values.authentication.image.repository .Values.global.repository }}/{{ .Values.authentication.image.name }}:{{ .Values.authentication.image.tag | default "latest" }}"
|
||||
imagePullPolicy: {{ .Values.authentication.image.imagePullPolicy | default "IfNotPresent" }}
|
||||
ports:
|
||||
{{- range $port := .Values.authentication.ports }}
|
||||
- containerPort: {{ $port.containerPort }}
|
||||
name: {{ $port.name }}
|
||||
protocol: {{ $port.protocol }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.resources }}
|
||||
resources:
|
||||
{{- toYaml .Values.authentication.resources | nindent 12 }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.probes }}
|
||||
{{- if and (.Values.authentication.probes.liveness) (eq .Values.authentication.probes.liveness.type "httpGet") }}
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: {{ .Values.authentication.probes.liveness.config.path }}
|
||||
port: {{ .Values.authentication.probes.liveness.config.port }}
|
||||
{{- if .Values.authentication.probes.liveness.config.initialDelaySeconds }}
|
||||
initialDelaySeconds: {{ .Values.authentication.probes.liveness.config.initialDelaySeconds }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.probes.liveness.config.periodSeconds }}
|
||||
periodSeconds: {{ .Values.authentication.probes.liveness.config.periodSeconds }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.probes.liveness.config.timeoutSeconds }}
|
||||
timeoutSeconds: {{ .Values.authentication.probes.liveness.config.timeoutSeconds }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.probes.liveness.config.successThreshold }}
|
||||
successThreshold: {{ .Values.authentication.probes.liveness.config.successThreshold }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.probes.liveness.config.failureThreshold }}
|
||||
failureThreshold: {{ .Values.authentication.probes.liveness.config.failureThreshold }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.probes.liveness.config.terminationGracePeriodSeconds }}
|
||||
terminationGracePeriodSeconds: {{ .Values.authentication.probes.liveness.config.terminationGracePeriodSeconds }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- if and (.Values.authentication.probes.readiness) (eq .Values.authentication.probes.readiness.type "httpGet") }}
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: {{ .Values.authentication.probes.readiness.config.path }}
|
||||
port: {{ .Values.authentication.probes.readiness.config.port }}
|
||||
{{- if .Values.authentication.probes.readiness.config.initialDelaySeconds }}
|
||||
initialDelaySeconds: {{ .Values.authentication.probes.readiness.config.initialDelaySeconds }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.probes.readiness.config.periodSeconds }}
|
||||
periodSeconds: {{ .Values.authentication.probes.readiness.config.periodSeconds }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.probes.readiness.config.timeoutSeconds }}
|
||||
timeoutSeconds: {{ .Values.authentication.probes.readiness.config.timeoutSeconds }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.probes.readiness.config.successThreshold }}
|
||||
successThreshold: {{ .Values.authentication.probes.readiness.config.successThreshold }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.probes.readiness.config.failureThreshold }}
|
||||
failureThreshold: {{ .Values.authentication.probes.readiness.config.failureThreshold }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end}}
|
||||
env:
|
||||
{{- range $key, $value := .Values.authentication.configs }}
|
||||
{{- if not (or (eq $key "jwtSecretKey") (eq $key "mongodbUri")) }}
|
||||
- name: {{ $key | snakecase | upper }}
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: authentication-config
|
||||
key: {{ $key | snakecase | upper }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
# inject from secret created by FreeleapsSecret object
|
||||
{{- if .Values.authentication.secrets }}
|
||||
{{ $targetSecretName := .Values.authentication.secrets.target.name }}
|
||||
{{- range .Values.authentication.secrets.data }}
|
||||
- name: {{ .key | snakecase | upper }}
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ $targetSecretName }}
|
||||
key: {{ .key }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- if .Values.logIngest.enabled }}
|
||||
volumeMounts:
|
||||
- name: app-logs
|
||||
mountPath: {{ .Values.logIngest.logPath }}
|
||||
{{- end }}
|
||||
{{- if .Values.logIngest.enabled }}
|
||||
volumes:
|
||||
- name: app-logs
|
||||
emptyDir: {}
|
||||
{{- end }}
|
||||
@ -0,0 +1,20 @@
|
||||
apiVersion: freeleaps.com/v1alpha1
|
||||
kind: FreeleapsSecret
|
||||
metadata:
|
||||
name: freeleaps-authentication-secrets
|
||||
namespace: {{ .Release.Namespace }}
|
||||
spec:
|
||||
secretStoreRef:
|
||||
kind: {{ .Values.authentication.secrets.secretStoreRef.kind }}
|
||||
name: {{ .Values.authentication.secrets.secretStoreRef.name }}
|
||||
target:
|
||||
name: {{ .Values.authentication.secrets.target.name }}
|
||||
creationPolicy: {{ .Values.authentication.secrets.target.creationPolicy }}
|
||||
refreshInterval: {{ .Values.authentication.secrets.refreshInterval }}
|
||||
data:
|
||||
{{- range .Values.authentication.secrets.data }}
|
||||
- secretKey: {{ .key }}
|
||||
remoteRef:
|
||||
key: {{ .remoteRef.key }}
|
||||
type: {{ .remoteRef.type }}
|
||||
{{- end }}
|
||||
@ -0,0 +1,36 @@
|
||||
{{ $namespace := .Release.Namespace }}
|
||||
{{ $appVersion := .Chart.AppVersion | quote }}
|
||||
{{ $releaseIngress := .Release.Service }}
|
||||
{{ $releaseName := .Release.Name }}
|
||||
{{- range $ingress := .Values.authentication.ingresses }}
|
||||
---
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: {{ $ingress.name }}
|
||||
namespace: {{ $namespace }}
|
||||
labels:
|
||||
app.kubernetes.io/version: {{ $appVersion }}
|
||||
app.kubernetes.io/name: {{ $ingress.name | quote }}
|
||||
app.kubernetes.io/managed-by: {{ $releaseIngress }}
|
||||
app.kubernetes.io/instance: {{ $releaseName }}
|
||||
spec:
|
||||
{{- if $ingress.class }}
|
||||
ingressClassName: {{ $ingress.class }}
|
||||
{{- end }}
|
||||
{{- if $ingress.tls }}
|
||||
tls:
|
||||
- hosts:
|
||||
- {{ $ingress.host }}
|
||||
{{- if $ingress.tls.exists }}
|
||||
secretName: {{ $ingress.tls.secretRef.name }}
|
||||
{{- else }}
|
||||
secretName: {{ $ingress.tls.name }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
rules:
|
||||
- host: {{ $ingress.host }}
|
||||
http:
|
||||
paths:
|
||||
{{- toYaml $ingress.rules | nindent 10 }}
|
||||
{{- end }}
|
||||
@ -0,0 +1,46 @@
|
||||
{{- if .Values.logIngest.enabled }}
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-otel-collector
|
||||
namespace: {{ .Release.Namespace }}
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRole
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-otel-collector
|
||||
rules:
|
||||
- apiGroups: [""]
|
||||
resources:
|
||||
- pods
|
||||
- namespaces
|
||||
- nodes
|
||||
verbs:
|
||||
- get
|
||||
- watch
|
||||
- list
|
||||
- apiGroups: ["apps"]
|
||||
resources:
|
||||
- replicasets
|
||||
- deployments
|
||||
- statefulsets
|
||||
- daemonsets
|
||||
verbs:
|
||||
- get
|
||||
- watch
|
||||
- list
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRoleBinding
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-otel-collector
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: ClusterRole
|
||||
name: {{ .Release.Name }}-otel-collector
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: {{ .Release.Name }}-otel-collector
|
||||
namespace: {{ .Release.Namespace }}
|
||||
{{- end }}
|
||||
@ -0,0 +1,119 @@
|
||||
{{- if .Values.logIngest.enabled }}
|
||||
apiVersion: opentelemetry.io/v1beta1
|
||||
kind: OpenTelemetryCollector
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-opentelemetry-collector
|
||||
namespace: {{ .Release.Namespace }}
|
||||
spec:
|
||||
mode: sidecar
|
||||
image: ghcr.io/open-telemetry/opentelemetry-collector-releases/opentelemetry-collector-contrib:latest
|
||||
serviceAccount: "{{ .Release.Name }}-otel-collector"
|
||||
volumeMounts:
|
||||
- name: app-logs
|
||||
mountPath: {{ .Values.logIngest.logPath }}
|
||||
securityContext:
|
||||
allowPrivilegeEscalation: true
|
||||
privileged: true
|
||||
runAsUser: 0
|
||||
runAsGroup: 0
|
||||
env:
|
||||
- name: KUBE_META_POD_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.name
|
||||
- name: KUBE_META_NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.namespace
|
||||
- name: KUBE_META_NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
- name: KUBE_META_POD_IP
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: status.podIP
|
||||
- name: KUBE_META_POD_UID
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.uid
|
||||
- name: KUBE_META_OBJECT_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.labels['app.kubernetes.io/instance']
|
||||
config:
|
||||
receivers:
|
||||
filelog:
|
||||
include:
|
||||
- {{ .Values.logIngest.logPathPattern }}
|
||||
start_at: beginning
|
||||
include_file_path: false
|
||||
include_file_name: false
|
||||
operators: []
|
||||
processors:
|
||||
resource:
|
||||
attributes:
|
||||
- action: insert
|
||||
key: k8s.node.name
|
||||
value: ${KUBE_META_NODE_NAME}
|
||||
- action: insert
|
||||
key: k8s.pod.name
|
||||
value: ${KUBE_META_POD_NAME}
|
||||
- action: insert
|
||||
key: k8s.pod.ip
|
||||
value: ${KUBE_META_POD_IP}
|
||||
- action: insert
|
||||
key: k8s.pod.uid
|
||||
value: ${KUBE_META_POD_UID}
|
||||
- action: insert
|
||||
key: k8s.namespace.name
|
||||
value: ${KUBE_META_NAMESPACE}
|
||||
- action: insert
|
||||
key: k8s.deployment.name
|
||||
value: ${KUBE_META_OBJECT_NAME}
|
||||
transform:
|
||||
log_statements:
|
||||
- context: log
|
||||
statements:
|
||||
# Set Grafana queryable labels
|
||||
- set(resource.attributes["service_name"], "authentication")
|
||||
- set(resource.attributes["environment"], "{{ .Values.global.environment | default .Release.Namespace }}")
|
||||
- set(resource.attributes["pod_name"], resource.attributes["k8s.pod.name"])
|
||||
- set(resource.attributes["pod_ip"], resource.attributes["k8s.pod.ip"])
|
||||
# Keep application for backward compatibility
|
||||
- set(resource.attributes["application"], "authentication")
|
||||
# Set additional kubernetes labels
|
||||
- set(resource.attributes["kubernetes_node_name"], resource.attributes["k8s.node.name"])
|
||||
- set(resource.attributes["kubernetes_pod_name"], resource.attributes["k8s.pod.name"])
|
||||
- set(resource.attributes["kubernetes_pod_ip"], resource.attributes["k8s.pod.ip"])
|
||||
- set(resource.attributes["kubernetes_deployment_name"], resource.attributes["k8s.deployment.name"])
|
||||
- set(resource.attributes["kubernetes_namespace"], resource.attributes["k8s.namespace.name"])
|
||||
# Parse and enrich log body
|
||||
- set(resource.attributes["body_json"], ParseJSON(log.body))
|
||||
- set(resource.attributes["body_json"]["kubernetes"]["pod"], resource.attributes["k8s.pod.name"])
|
||||
- set(resource.attributes["body_json"]["kubernetes"]["namespace"], resource.attributes["k8s.namespace.name"])
|
||||
- set(resource.attributes["body_json"]["kubernetes"]["pod_ip"], resource.attributes["k8s.pod.ip"])
|
||||
- set(resource.attributes["body_json"]["kubernetes"]["pod_uid"], resource.attributes["k8s.pod.uid"])
|
||||
- set(resource.attributes["body_json"]["kubernetes"]["deployment"], resource.attributes["k8s.deployment.name"])
|
||||
- set(resource.attributes["body_json"]["kubernetes"]["node"], resource.attributes["k8s.node.name"])
|
||||
- set(resource.attributes["body_json"]["kubernetes"]["namespace"], resource.attributes["k8s.namespace.name"])
|
||||
- set(log.body, resource.attributes["body_json"])
|
||||
- delete_key(resource.attributes, "body_json")
|
||||
batch:
|
||||
send_batch_size: 5
|
||||
timeout: 10s
|
||||
exporters:
|
||||
otlphttp/logs:
|
||||
endpoint: {{ .Values.logIngest.lokiEndpoint }}/otlp
|
||||
tls:
|
||||
insecure: true
|
||||
service:
|
||||
telemetry:
|
||||
logs:
|
||||
level: info
|
||||
pipelines:
|
||||
logs:
|
||||
receivers: [filelog]
|
||||
processors: [resource, transform, batch]
|
||||
exporters: [otlphttp/logs]
|
||||
{{- end }}
|
||||
@ -0,0 +1,26 @@
|
||||
{{ $namespace := .Release.Namespace }}
|
||||
{{ $appVersion := .Chart.AppVersion | quote }}
|
||||
{{ $releaseService := .Release.Service }}
|
||||
{{ $releaseName := .Release.Name }}
|
||||
{{- range $service := .Values.authentication.services }}
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: {{ $service.name }}
|
||||
namespace: {{ $namespace }}
|
||||
labels:
|
||||
app.kubernetes.io/version: {{ $appVersion }}
|
||||
app.kubernetes.io/name: {{ $service.name | quote }}
|
||||
app.kubernetes.io/managed-by: {{ $releaseService }}
|
||||
app.kubernetes.io/instance: {{ $releaseName }}
|
||||
spec:
|
||||
ports:
|
||||
- port: {{ $service.port }}
|
||||
targetPort: {{ $service.targetPort }}
|
||||
selector:
|
||||
app.kubernetes.io/version: {{ $appVersion }}
|
||||
app.kubernetes.io/name: "authentication"
|
||||
app.kubernetes.io/managed-by: {{ $releaseService }}
|
||||
app.kubernetes.io/instance: {{ $releaseName }}
|
||||
{{- end }}
|
||||
@ -0,0 +1,40 @@
|
||||
{{ $namespace := .Release.Namespace }}
|
||||
{{ $appVersion := .Chart.AppVersion | quote }}
|
||||
{{ $releaseService := .Release.Service }}
|
||||
{{ $releaseName := .Release.Name }}
|
||||
|
||||
{{- range $service := .Values.authentication.services }}
|
||||
{{- if $service.serviceMonitor.enabled }}
|
||||
---
|
||||
apiVersion: monitoring.coreos.com/v1
|
||||
kind: ServiceMonitor
|
||||
metadata:
|
||||
name: {{ $service.name }}-monitor
|
||||
namespace: {{ $service.serviceMonitor.namespace }}
|
||||
labels:
|
||||
app.kubernetes.io/version: {{ $appVersion }}
|
||||
app.kubernetes.io/name: {{ $service.name }}-monitor
|
||||
app.kubernetes.io/managed-by: {{ $releaseService }}
|
||||
app.kubernetes.io/instance: {{ $releaseName }}
|
||||
{{- if $service.serviceMonitor.labels }}
|
||||
{{- toYaml $service.serviceMonitor.labels | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
endpoints:
|
||||
- path: /api/_/metrics
|
||||
targetPort: {{ $service.targetPort }}
|
||||
{{- if $service.serviceMonitor.interval }}
|
||||
interval: {{ $service.serviceMonitor.interval }}
|
||||
{{- end }}
|
||||
{{- if $service.serviceMonitor.scrapeTimeout }}
|
||||
scrapeTimeout: {{ $service.serviceMonitor.scrapeTimeout }}
|
||||
{{- end }}
|
||||
namespaceSelector:
|
||||
matchNames:
|
||||
- {{ $namespace | quote }}
|
||||
selector:
|
||||
matchLabels:
|
||||
app.kubernetes.io/name: {{ $service.name }}
|
||||
app.kubernetes.io/instance: {{ $releaseName }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
32
.freeleaps/devops/helm-pkg/templates/authentication/vpa.yaml
Normal file
32
.freeleaps/devops/helm-pkg/templates/authentication/vpa.yaml
Normal file
@ -0,0 +1,32 @@
|
||||
{{- if .Values.authentication.vpa }}
|
||||
---
|
||||
apiVersion: autoscaling.k8s.io/v1
|
||||
kind: VerticalPodAutoscaler
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-vpa
|
||||
namespace: {{ .Release.Namespace }}
|
||||
spec:
|
||||
targetRef:
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
name: authentication
|
||||
resourcePolicy:
|
||||
containerPolicies:
|
||||
- containerName: '*'
|
||||
{{- if .Values.authentication.vpa.minAllowed.enabled }}
|
||||
minAllowed:
|
||||
cpu: {{ .Values.authentication.vpa.minAllowed.cpu }}
|
||||
memory: {{ .Values.authentication.vpa.minAllowed.memory }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.vpa.maxAllowed.enabled }}
|
||||
maxAllowed:
|
||||
cpu: {{ .Values.authentication.vpa.maxAllowed.cpu }}
|
||||
memory: {{ .Values.authentication.vpa.maxAllowed.memory }}
|
||||
{{- end }}
|
||||
{{- if .Values.authentication.vpa.controlledResources }}
|
||||
controlledResources:
|
||||
{{- range .Values.authentication.vpa.controlledResources }}
|
||||
- {{ . }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
@ -1,8 +0,0 @@
|
||||
{{- if .Values.serviceAccount.create -}}
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: {{ include "app.serviceAccountName" . }}
|
||||
labels:
|
||||
{{- include "app.labels" . | nindent 4 }}
|
||||
{{- end }}
|
||||
@ -1,58 +0,0 @@
|
||||
{{- if .Values.freeleapsAuthenticationWebServer.enabled -}}
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-web-server
|
||||
labels:
|
||||
{{- include "app.labels" . | nindent 4 }}
|
||||
component: web-server
|
||||
spec:
|
||||
replicas: {{ .Values.freeleapsAuthenticationWebServer.replicas }}
|
||||
selector:
|
||||
matchLabels:
|
||||
{{- include "app.selectorLabels" . | nindent 6 }}
|
||||
component: web-server
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
{{- include "app.selectorLabels" . | nindent 8 }}
|
||||
component: web-server
|
||||
spec:
|
||||
serviceAccountName: {{ include "app.serviceAccountName" . }}
|
||||
containers:
|
||||
- name: web-server
|
||||
image: "{{ .Values.freeleapsAuthenticationWebServer.image.registry | default .Values.global.registry }}/{{ .Values.freeleapsAuthenticationWebServer.image.repository | default .Values.global.repository }}/{{ .Values.freeleapsAuthenticationWebServer.image.name }}:{{ .Values.freeleapsAuthenticationWebServer.image.tag }}"
|
||||
imagePullPolicy: {{ .Values.freeleapsAuthenticationWebServer.image.imagePullPolicy }}
|
||||
ports:
|
||||
{{- toYaml .Values.freeleapsAuthenticationWebServer.ports | nindent 12 }}
|
||||
resources:
|
||||
{{- toYaml .Values.freeleapsAuthenticationWebServer.resources | nindent 12 }}
|
||||
env:
|
||||
{{- toYaml .Values.freeleapsAuthenticationWebServer.env | nindent 12 }}
|
||||
{{- with .Values.freeleapsAuthenticationWebServer.livenessProbe }}
|
||||
livenessProbe:
|
||||
{{- toYaml . | nindent 12 }}
|
||||
{{- end }}
|
||||
{{- with .Values.freeleapsAuthenticationWebServer.readinessProbe }}
|
||||
readinessProbe:
|
||||
{{- toYaml . | nindent 12 }}
|
||||
{{- end }}
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: freeleaps-authentication-web-server
|
||||
labels:
|
||||
{{- include "app.labels" . | nindent 4 }}
|
||||
component: web-server
|
||||
spec:
|
||||
type: {{ .Values.service.type }}
|
||||
ports:
|
||||
- port: 80
|
||||
targetPort: http
|
||||
protocol: TCP
|
||||
name: web-server
|
||||
selector:
|
||||
{{- include "app.selectorLabels" . | nindent 4 }}
|
||||
component: web-server
|
||||
{{- end }}
|
||||
@ -1,100 +1,147 @@
|
||||
# Default values for the Helm chart
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
# Global settings
|
||||
dashboard:
|
||||
enabled: false
|
||||
logIngest:
|
||||
enabled: true
|
||||
lokiEndpoint: http://loki-gateway.freeleaps-logging-system
|
||||
logPathPattern: /app/log/authentication/*.log
|
||||
logPath: /app/log/authentication
|
||||
global:
|
||||
environment: alpha
|
||||
registry: docker.io
|
||||
repository: freeleapsdevops
|
||||
nodeSelector: {}
|
||||
|
||||
# Name override settings
|
||||
nameOverride: ""
|
||||
fullnameOverride: ""
|
||||
|
||||
freeleapsAuthenticationApiServer:
|
||||
enabled: true
|
||||
name: freeleaps-authentication-api-server
|
||||
repository: freeleaps
|
||||
authentication:
|
||||
replicas: 1
|
||||
image:
|
||||
registry: ""
|
||||
repository: ""
|
||||
name: api-server
|
||||
tag: latest
|
||||
registry: docker.io
|
||||
repository: null
|
||||
name: authentication
|
||||
tag: snapshot-512e418
|
||||
imagePullPolicy: IfNotPresent
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 8888
|
||||
containerPort: 8004
|
||||
protocol: TCP
|
||||
resources:
|
||||
requests:
|
||||
cpu: "0.2"
|
||||
memory: "256Mi"
|
||||
cpu: 50m
|
||||
memory: 64Mi
|
||||
limits:
|
||||
cpu: "0.5"
|
||||
memory: "512Mi"
|
||||
env:
|
||||
- name: PYTHONUNBUFFERED
|
||||
value: "1"
|
||||
- name: DEBUG
|
||||
value: "false"
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /api/v1/_/livez
|
||||
port: http
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /api/v1/_/readyz
|
||||
port: http
|
||||
cpu: 200m
|
||||
memory: 128Mi
|
||||
probes:
|
||||
readiness:
|
||||
type: httpGet
|
||||
config:
|
||||
path: /api/_/readyz
|
||||
port: 8004
|
||||
initialDelaySeconds: 5
|
||||
periodSeconds: 5
|
||||
|
||||
freeleapsAuthenticationWebServer:
|
||||
enabled: true
|
||||
name: freeleaps-authentication-web-server
|
||||
replicas: 1
|
||||
image:
|
||||
registry: ""
|
||||
repository: ""
|
||||
name: web-server
|
||||
tag: latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 80
|
||||
protocol: TCP
|
||||
resources:
|
||||
requests:
|
||||
cpu: "0.1"
|
||||
memory: "128Mi"
|
||||
limits:
|
||||
cpu: "0.3"
|
||||
memory: "256Mi"
|
||||
env:
|
||||
- name: NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.namespace
|
||||
- name: API_SERVER_URL
|
||||
value: "http://freeleaps-authentication-api-server.svc.freeleaps.cluster:8888"
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
periodSeconds: 30
|
||||
timeoutSeconds: 3
|
||||
successThreshold: 1
|
||||
failureThreshold: 3
|
||||
liveness:
|
||||
type: httpGet
|
||||
config:
|
||||
path: /api/_/livez
|
||||
port: 8004
|
||||
initialDelaySeconds: 5
|
||||
periodSeconds: 5
|
||||
|
||||
serviceAccount:
|
||||
create: true
|
||||
name: ""
|
||||
|
||||
service:
|
||||
periodSeconds: 15
|
||||
timeoutSeconds: 3
|
||||
successThreshold: 1
|
||||
failureThreshold: 3
|
||||
terminationGracePeriodSeconds: 30
|
||||
services:
|
||||
- name: authentication-service
|
||||
type: ClusterIP
|
||||
port: 8004
|
||||
targetPort: 8004
|
||||
serviceMonitor:
|
||||
enabled: false
|
||||
ingresses:
|
||||
- name: authentication-ingress
|
||||
host: authentication.freeleaps-alpha.com
|
||||
class: nginx
|
||||
rules:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: authentication-service
|
||||
port:
|
||||
number: 8004
|
||||
tls:
|
||||
exists: false
|
||||
issuerRef:
|
||||
name: freeleaps-alpha-dot-com
|
||||
kind: ClusterIssuer
|
||||
name: authentication.freeleaps-alpha.com-cert
|
||||
configs:
|
||||
tz: UTC
|
||||
appName: authentication
|
||||
appEnv: alpha
|
||||
devsvcWebapiUrlBase: http://devsvc-service.freeleaps-alpha.svc.freeleaps.cluster:8007/api/devsvc/
|
||||
notificationWebapiUrlBase: http://notification-service.freeleaps-alpha.svc.freeleaps.cluster:8003/api/notification/
|
||||
authServiceEndpoint: http://freeleaps-auth-service.68c0da88a0a7837e84b580eb-alpha.svc.freeleaps.cluster:9000/api/v1/
|
||||
jwtAlgorithm: HS256
|
||||
serviceApiAccessHost: 0.0.0.0
|
||||
serviceApiAccessPort: 8004
|
||||
mongodbName: freeleaps2
|
||||
mongodbPort: 27017
|
||||
metricsEnabled: 'false'
|
||||
probesEnabled: 'true'
|
||||
secrets:
|
||||
secretStoreRef:
|
||||
kind: FreeleapsSecretStore
|
||||
name: freeleaps-main-secret-store
|
||||
target:
|
||||
name: freeleaps-authentication-secrets
|
||||
creationPolicy: Owner
|
||||
refreshInterval: 30s
|
||||
data:
|
||||
- key: jwtSecretKey
|
||||
remoteRef:
|
||||
key: freeleaps-alpha-jwt-secret-key
|
||||
type: Secret
|
||||
- key: mongodbUri
|
||||
remoteRef:
|
||||
key: freeleaps-alpha-mongodb-uri
|
||||
type: Secret
|
||||
vpa:
|
||||
minAllowed:
|
||||
enabled: false
|
||||
cpu: 100m
|
||||
memory: 64Mi
|
||||
maxAllowed:
|
||||
enabled: true
|
||||
cpu: 100m
|
||||
memory: 256Mi
|
||||
controlledResources:
|
||||
- cpu
|
||||
- memory
|
||||
prometheusRule:
|
||||
name: freepeals-alpha-authentication # NOTE(review): "freepeals" looks like a typo for "freeleaps" — confirm before renaming, existing PrometheusRule objects reference this name
|
||||
enabled: false
|
||||
namespace: freeleaps-monitoring-system
|
||||
labels:
|
||||
release: kube-prometheus-stack
|
||||
rules:
|
||||
- alert: FreeleapsAuthenticationServiceDown
|
||||
expr: up{job="authentication-service"} == 0
|
||||
for: 1m
|
||||
labels:
|
||||
severity: critical
|
||||
service: authentication-service
|
||||
annotations:
|
||||
summary: Freeleaps Authentication service is down (instance {{ $labels.instance }})
|
||||
description: Freeleaps Authentication service has been down for more than 1 minute.
|
||||
runbook_url: https://netorgft10898514.sharepoint.com/:w:/s/FreeleapsEngineeringTeam/EUlvzumTsPxCpPAzI3gm9OIB0DCLTjQzzYVL6VsHYZFjxg?e=0dxVr7
|
||||
- alert: FreeleapsAuthenticationServiceHighErrorRate
|
||||
expr: rate(http_requests_total{job="authentication-service",status=~"5.."}[5m]) > 0.1
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: authentication-service
|
||||
annotations:
|
||||
summary: High error rate in freeleaps authentication service (instance {{ $labels.instance }})
|
||||
description: Freeleaps Authentication service error rate is {{ $value }} errors per second.
|
||||
runbook_url: https://netorgft10898514.sharepoint.com/:w:/s/FreeleapsEngineeringTeam/EUlvzumTsPxCpPAzI3gm9OIB0DCLTjQzzYVL6VsHYZFjxg?e=0dxVr7
|
||||
|
||||
@ -1,100 +1,138 @@
|
||||
# Default values for the Helm chart
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
# Global settings
|
||||
dashboard:
|
||||
enabled: true
|
||||
name: freeleaps-prod-authentication-dashboard
|
||||
title: Authentication Service Dashboard (PROD)
|
||||
metricsPrefix: freeleaps_authentication
|
||||
logIngest:
|
||||
enabled: true
|
||||
lokiEndpoint: http://loki-gateway.freeleaps-logging-system
|
||||
logPathPattern: /app/log/authentication/*.log
|
||||
logPath: /app/log/authentication
|
||||
global:
|
||||
environment: prod
|
||||
registry: docker.io
|
||||
repository: freeleapsdevops
|
||||
nodeSelector: {}
|
||||
|
||||
# Name override settings
|
||||
nameOverride: ""
|
||||
fullnameOverride: ""
|
||||
|
||||
freeleapsAuthenticationApiServer:
|
||||
enabled: true
|
||||
name: freeleaps-authentication-api-server
|
||||
repository: freeleaps
|
||||
authentication:
|
||||
replicas: 1
|
||||
image:
|
||||
registry: ""
|
||||
repository: ""
|
||||
name: api-server
|
||||
tag: latest
|
||||
registry: docker.io
|
||||
repository: null
|
||||
name: authentication
|
||||
tag: 1.15.0
|
||||
imagePullPolicy: IfNotPresent
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 8888
|
||||
containerPort: 8004
|
||||
protocol: TCP
|
||||
resources:
|
||||
requests:
|
||||
cpu: "0.2"
|
||||
memory: "256Mi"
|
||||
cpu: 200m
|
||||
memory: 64Mi
|
||||
limits:
|
||||
cpu: "0.5"
|
||||
memory: "512Mi"
|
||||
env:
|
||||
- name: PYTHONUNBUFFERED
|
||||
value: "1"
|
||||
- name: DEBUG
|
||||
value: "false"
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /api/v1/_/livez
|
||||
port: http
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /api/v1/_/readyz
|
||||
port: http
|
||||
cpu: 300m
|
||||
memory: 128Mi
|
||||
probes:
|
||||
readiness:
|
||||
type: httpGet
|
||||
config:
|
||||
path: /api/_/readyz
|
||||
port: 8004
|
||||
initialDelaySeconds: 5
|
||||
periodSeconds: 5
|
||||
|
||||
freeleapsAuthenticationWebServer:
|
||||
enabled: true
|
||||
name: freeleaps-authentication-web-server
|
||||
replicas: 1
|
||||
image:
|
||||
registry: ""
|
||||
repository: ""
|
||||
name: web-server
|
||||
tag: latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 80
|
||||
protocol: TCP
|
||||
resources:
|
||||
requests:
|
||||
cpu: "0.1"
|
||||
memory: "128Mi"
|
||||
limits:
|
||||
cpu: "0.3"
|
||||
memory: "256Mi"
|
||||
env:
|
||||
- name: NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.namespace
|
||||
- name: API_SERVER_URL
|
||||
value: "http://freeleaps-authentication-api-server.svc.freeleaps.cluster:8888"
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
periodSeconds: 30
|
||||
timeoutSeconds: 3
|
||||
successThreshold: 1
|
||||
failureThreshold: 3
|
||||
liveness:
|
||||
type: httpGet
|
||||
config:
|
||||
path: /api/_/livez
|
||||
port: 8004
|
||||
initialDelaySeconds: 5
|
||||
periodSeconds: 5
|
||||
|
||||
serviceAccount:
|
||||
create: true
|
||||
name: ""
|
||||
|
||||
service:
|
||||
periodSeconds: 15
|
||||
timeoutSeconds: 3
|
||||
successThreshold: 1
|
||||
failureThreshold: 3
|
||||
terminationGracePeriodSeconds: 30
|
||||
services:
|
||||
- name: authentication-service
|
||||
type: ClusterIP
|
||||
port: 8004
|
||||
targetPort: 8004
|
||||
serviceMonitor:
|
||||
enabled: true
|
||||
labels:
|
||||
release: kube-prometheus-stack
|
||||
namespace: freeleaps-monitoring-system
|
||||
interval: 30s
|
||||
scrapeTimeout: ''
|
||||
ingresses: {}
|
||||
configs:
|
||||
tz: UTC
|
||||
appName: authentication
|
||||
appEnv: prod
|
||||
devsvcWebapiUrlBase: http://devsvc-service.freeleaps-prod.svc.freeleaps.cluster:8007/api/devsvc/
|
||||
notificationWebapiUrlBase: http://notification-service.freeleaps-prod.svc.freeleaps.cluster:8003/api/notification/
|
||||
authServiceEndpoint: http://freeleaps-auth-service.68c0da88a0a7837e84b580eb-prod.svc.freeleaps.cluster:9000/api/v1/
|
||||
jwtAlgorithm: HS256
|
||||
serviceApiAccessHost: 0.0.0.0
|
||||
serviceApiAccessPort: 8004
|
||||
mongodbName: freeleaps2
|
||||
mongodbPort: 27017
|
||||
metricsEnabled: 'true'
|
||||
probesEnabled: 'true'
|
||||
secrets:
|
||||
secretStoreRef:
|
||||
kind: FreeleapsSecretStore
|
||||
name: freeleaps-main-secret-store
|
||||
target:
|
||||
name: freeleaps-authentication-prod-secrets
|
||||
creationPolicy: Owner
|
||||
refreshInterval: 30s
|
||||
data:
|
||||
- key: jwtSecretKey
|
||||
remoteRef:
|
||||
key: freeleaps-prod-jwt-secret-key
|
||||
type: Secret
|
||||
- key: mongodbUri
|
||||
remoteRef:
|
||||
key: freeleaps-prod-mongodb-uri
|
||||
type: Secret
|
||||
vpa:
|
||||
minAllowed:
|
||||
enabled: true
|
||||
cpu: 50m
|
||||
memory: 64Mi
|
||||
maxAllowed:
|
||||
enabled: true
|
||||
cpu: 200m
|
||||
memory: 128Mi
|
||||
controlledResources:
|
||||
- cpu
|
||||
- memory
|
||||
prometheusRule:
|
||||
name: freepeals-prod-authentication # NOTE(review): "freepeals" looks like a typo for "freeleaps" — confirm before renaming, existing PrometheusRule objects reference this name
|
||||
enabled: true
|
||||
namespace: freeleaps-monitoring-system
|
||||
labels:
|
||||
release: kube-prometheus-stack
|
||||
rules:
|
||||
- alert: FreeleapsAuthenticationServiceDown
|
||||
expr: up{job="authentication-service"} == 0
|
||||
for: 5m
|
||||
labels:
|
||||
severity: critical
|
||||
service: authentication-service
|
||||
annotations:
|
||||
summary: Freeleaps Authentication service is down (instance {{ $labels.instance }})
|
||||
description: Freeleaps Authentication service has been down for more than 5 minutes.
|
||||
runbook_url: https://netorgft10898514.sharepoint.com/:w:/s/FreeleapsEngineeringTeam/EUlvzumTsPxCpPAzI3gm9OIB0DCLTjQzzYVL6VsHYZFjxg?e=0dxVr7
|
||||
- alert: FreeleapsAuthenticationServiceHighErrorRate
|
||||
expr: rate(http_requests_total{job="authentication-service",status=~"5.."}[5m]) > 0.1
|
||||
for: 5m
|
||||
labels:
|
||||
severity: warning
|
||||
service: authentication-service
|
||||
annotations:
|
||||
summary: High error rate in freeleaps authentication service (instance {{ $labels.instance }})
|
||||
description: Freeleaps Authentication service error rate is {{ $value }} errors per second.
|
||||
runbook_url: https://netorgft10898514.sharepoint.com/:w:/s/FreeleapsEngineeringTeam/EUlvzumTsPxCpPAzI3gm9OIB0DCLTjQzzYVL6VsHYZFjxg?e=0dxVr7
|
||||
|
||||
@ -1,100 +1,114 @@
|
||||
# Default values for the Helm chart
|
||||
# This is a YAML-formatted file.
|
||||
# Declare variables to be passed into your templates.
|
||||
|
||||
# Global settings
|
||||
global:
|
||||
registry: docker.io
|
||||
repository: freeleapsdevops
|
||||
repository: freeleaps
|
||||
nodeSelector: {}
|
||||
|
||||
# Name override settings
|
||||
nameOverride: ""
|
||||
fullnameOverride: ""
|
||||
|
||||
freeleapsAuthenticationApiServer:
|
||||
enabled: true
|
||||
name: freeleaps-authentication-api-server
|
||||
dashboard:
|
||||
enabled: false
|
||||
name: freeleaps-prod-authentication-dashboard
|
||||
title: Authentication Service Dashboard
|
||||
metricsPrefix: freeleaps_authentication
|
||||
logIngest:
|
||||
enabled: false
|
||||
lokiEndpoint: http://loki-gateway.freeleaps-logging-system
|
||||
logPathPattern: /app/log/authentication/*.log
|
||||
logPath: /app/log/authentication
|
||||
fluentbit:
|
||||
enabled: false
|
||||
resources:
|
||||
requests:
|
||||
cpu: 50m
|
||||
memory: 128Mi
|
||||
limits:
|
||||
cpu: 200m
|
||||
memory: 512Mi
|
||||
image: kubesphere/fluent-bit:v4.0-debug
|
||||
imagePullPolicy: IfNotPresent
|
||||
timeKey: record.repr
|
||||
timeFormat: "%Y-%m-%dT%H:%M:%S.%LZ"
|
||||
logPath: /app/log/authentication/*.log
|
||||
authentication:
|
||||
replicas: 1
|
||||
image:
|
||||
registry: ""
|
||||
repository: ""
|
||||
name: api-server
|
||||
tag: latest
|
||||
registry:
|
||||
repository: freeleaps
|
||||
name: authentication
|
||||
tag: 1.0.0
|
||||
imagePullPolicy: IfNotPresent
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 8888
|
||||
containerPort: 8004
|
||||
protocol: TCP
|
||||
resources:
|
||||
requests:
|
||||
cpu: "0.2"
|
||||
memory: "256Mi"
|
||||
limits:
|
||||
cpu: "0.5"
|
||||
memory: "512Mi"
|
||||
env:
|
||||
- name: PYTHONUNBUFFERED
|
||||
value: "1"
|
||||
- name: DEBUG
|
||||
value: "false"
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /api/v1/_/livez
|
||||
port: http
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /api/v1/_/readyz
|
||||
port: http
|
||||
initialDelaySeconds: 5
|
||||
periodSeconds: 5
|
||||
|
||||
freeleapsAuthenticationWebServer:
|
||||
enabled: true
|
||||
name: freeleaps-authentication-web-server
|
||||
replicas: 1
|
||||
image:
|
||||
registry: ""
|
||||
repository: ""
|
||||
name: web-server
|
||||
tag: latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 80
|
||||
protocol: TCP
|
||||
resources:
|
||||
requests:
|
||||
cpu: "0.1"
|
||||
memory: "128Mi"
|
||||
limits:
|
||||
cpu: "0.3"
|
||||
memory: "256Mi"
|
||||
env:
|
||||
- name: NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.namespace
|
||||
- name: API_SERVER_URL
|
||||
value: "http://freeleaps-authentication-api-server.svc.cluster.local:8888/"
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
initialDelaySeconds: 5
|
||||
periodSeconds: 5
|
||||
|
||||
serviceAccount:
|
||||
create: true
|
||||
name: ""
|
||||
|
||||
service:
|
||||
cpu: "1"
|
||||
memory: "1Gi"
|
||||
# FIXME: Wait until the developers implements the probes APIs
|
||||
probes: {}
|
||||
services:
|
||||
- name: authentication-service
|
||||
type: ClusterIP
|
||||
port: 8004
|
||||
targetPort: 8004
|
||||
serviceMonitor:
|
||||
enabled: false
|
||||
labels:
|
||||
release: kube-prometheus-stack
|
||||
namespace: freeleaps-monitoring-system
|
||||
interval: 30s
|
||||
scrapeTimeout: ""
|
||||
# Defaults to {}, which means doesn't have any ingress
|
||||
ingresses: {}
|
||||
configs:
|
||||
# TZ
|
||||
tz: "America/Los_Angeles" # was "America/Settle" — not a valid IANA tz name; Seattle is in the America/Los_Angeles zone
|
||||
# APP_NAME
|
||||
appName: "authentication"
|
||||
# AUTH_SERVICE_ENDPOINT
|
||||
authServiceEndpoint: ""
|
||||
# DEVSVC_WEBAPI_URL_BASE
|
||||
devsvcWebapiUrlBase: "http://devsvc.<namespace>.svc.freeleaps.cluster:<service-port>/api/devsvc"
|
||||
# NOTIFICATION_WEBAPI_URL_BASE
|
||||
notificationWebapiUrlBase: "http://notification.<namespace>.svc.freeleaps.cluster:<service-port>/api/notification"
|
||||
# JWT_ALGORITHM
|
||||
jwtAlgorithm: "HS256"
|
||||
# MONGODB_NAME
|
||||
mongodbName: ""
|
||||
# MONGODB_PORT
|
||||
mongodbPort: "27017"
|
||||
# METRICS_ENABLED
|
||||
metricsEnabled: "false"
|
||||
# PROBES_ENABLED
|
||||
probesEnabled: "false"
|
||||
# AKV secrets configuration
|
||||
secrets:
|
||||
secretStoreRef:
|
||||
kind: FreeleapsSecretStore
|
||||
name: freeleaps-main-secret-store
|
||||
target:
|
||||
name: "freeleaps-authentication-secrets"
|
||||
creationPolicy: "Owner"
|
||||
refreshInterval: 30s
|
||||
data:
|
||||
- key: jwtSecretKey
|
||||
remoteRef:
|
||||
key: "freeleaps-jwt-secret-key"
|
||||
type: Secret
|
||||
- key: mongodbUri
|
||||
remoteRef:
|
||||
key: "freeleaps-mongodb-uri"
|
||||
type: Secret
|
||||
vpa:
|
||||
minAllowed:
|
||||
enabled: false
|
||||
cpu: 100m
|
||||
memory: 64Mi
|
||||
maxAllowed:
|
||||
enabled: true
|
||||
cpu: 100m
|
||||
memory: 256Mi
|
||||
controlledResources:
|
||||
- cpu
|
||||
- memory
|
||||
@ -1,44 +0,0 @@
|
||||
server {
|
||||
listen 80;
|
||||
server_name _;
|
||||
root /usr/share/nginx/html;
|
||||
index index.html;
|
||||
|
||||
# Gzip compression
|
||||
gzip on;
|
||||
gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
|
||||
gzip_min_length 1000;
|
||||
gzip_proxied any;
|
||||
|
||||
# Cache control for static assets
|
||||
location /assets/ {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, no-transform";
|
||||
access_log off;
|
||||
}
|
||||
|
||||
# Handle Vue router history mode
|
||||
location / {
|
||||
try_files $uri $uri/ /index.html;
|
||||
expires -1;
|
||||
add_header Cache-Control "no-store, no-cache, must-revalidate";
|
||||
}
|
||||
|
||||
# Security headers
|
||||
add_header X-Frame-Options "SAMEORIGIN";
|
||||
add_header X-XSS-Protection "1; mode=block";
|
||||
add_header X-Content-Type-Options "nosniff";
|
||||
|
||||
# Proxy API requests to the backend
|
||||
location /api/ {
|
||||
proxy_pass ${API_SERVER_URL};
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
@ -1,11 +0,0 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
# Default value for API_SERVER_URL if not provided
|
||||
API_SERVER_URL=${API_SERVER_URL:-http://api-server:8888/}
|
||||
|
||||
# Replace the environment variable in the nginx config
|
||||
envsubst '${API_SERVER_URL}' < /etc/nginx/conf.d/default.conf.template > /etc/nginx/conf.d/default.conf
|
||||
|
||||
# Start nginx
|
||||
exec nginx -g 'daemon off;'
|
||||
@ -1,24 +0,0 @@
|
||||
# Use nginx alpine as base image
|
||||
FROM nginx:stable-alpine
|
||||
|
||||
# Install envsubst
|
||||
RUN apk add --no-cache gettext
|
||||
|
||||
# Copy pre-built dist files into nginx
|
||||
COPY dist /usr/share/nginx/html
|
||||
|
||||
# Copy nginx configuration template and entry script
|
||||
COPY nginx/default.conf /etc/nginx/conf.d/default.conf.template
|
||||
COPY docker-entrypoint.sh /docker-entrypoint.sh
|
||||
|
||||
# Make the entry script executable
|
||||
RUN chmod ug+x /docker-entrypoint.sh
|
||||
|
||||
# Set default environment variable
|
||||
ENV API_SERVER_URL=http://api-server:8888/
|
||||
|
||||
# Expose port 80
|
||||
EXPOSE 80
|
||||
|
||||
# Use the entry script as the entrypoint
|
||||
ENTRYPOINT ["/docker-entrypoint.sh"]
|
||||
@ -8,41 +8,22 @@ spec:
|
||||
layout: FAST_API_VUE_3
|
||||
serviceName: freeleaps-authentication
|
||||
executeMode: fully
|
||||
branch: main
|
||||
branch: dev
|
||||
components:
|
||||
- name: freeleapsAuthenticationApiServer
|
||||
- name: authentication
|
||||
root: '.'
|
||||
language: python
|
||||
dependenciesManager: pip
|
||||
requirementsFile: requirements.txt
|
||||
buildCacheEnabled: true
|
||||
buildAgentImage: python:3.10-slim-buster
|
||||
buildAgentImage: python:3.12-slim
|
||||
buildArtifacts:
|
||||
- '.'
|
||||
imageBuilder: dind
|
||||
dockerfilePath: .freeleaps/devops/api-server.Dockerfile
|
||||
imageName: freeleaps-authentication-api-server
|
||||
dockerfilePath: Dockerfile
|
||||
imageName: authentication
|
||||
imageBuildRoot: '.'
|
||||
imageReleaseArchitectures:
|
||||
- linux/amd64
|
||||
- linux/arm64/v8
|
||||
- name: freeleapsAuthenticationWebServer
|
||||
root: 'web'
|
||||
language: javascript
|
||||
dependenciesManager: pnpm
|
||||
pnpmPackageJsonFile: package.json
|
||||
buildCacheEnabled: true
|
||||
buildAgentImage: node:lts
|
||||
buildCommand: 'pnpm -r build'
|
||||
buildArtifacts:
|
||||
- 'dist'
|
||||
- 'public'
|
||||
imageBuilder: dind
|
||||
dockerfilePath: ../.freeleaps/devops/web-server.Dockerfile
|
||||
imageName: freeleaps-authentication-web-server
|
||||
imageBuildRoot: '.'
|
||||
imageReleaseArchitectures:
|
||||
- linux/amd64
|
||||
- linux/arm64/v8
|
||||
|
||||
|
||||
|
||||
@ -10,39 +10,18 @@ spec:
|
||||
executeMode: fully
|
||||
branch: main
|
||||
components:
|
||||
- name: freeleapsAuthenticationApiServer
|
||||
- name: authentication
|
||||
root: '.'
|
||||
language: python
|
||||
dependenciesManager: pip
|
||||
requirementsFile: requirements.txt
|
||||
buildCacheEnabled: true
|
||||
buildAgentImage: python:3.10-slim-buster
|
||||
buildAgentImage: python:3.12-slim
|
||||
buildArtifacts:
|
||||
- '.'
|
||||
imageBuilder: dind
|
||||
dockerfilePath: .freeleaps/devops/api-server.Dockerfile
|
||||
imageName: freeleaps-authentication-api-server
|
||||
dockerfilePath: Dockerfile
|
||||
imageName: authentication
|
||||
imageBuildRoot: '.'
|
||||
imageReleaseArchitectures:
|
||||
- linux/amd64
|
||||
- linux/arm64/v8
|
||||
- name: freeleapsAuthenticationWebServer
|
||||
root: 'web'
|
||||
language: javascript
|
||||
dependenciesManager: pnpm
|
||||
pnpmPackageJsonFile: package.json
|
||||
buildCacheEnabled: true
|
||||
buildAgentImage: node:lts
|
||||
buildCommand: 'pnpm -r build'
|
||||
buildArtifacts:
|
||||
- 'dist'
|
||||
- 'public'
|
||||
imageBuilder: dind
|
||||
dockerfilePath: ../.freeleaps/devops/web-server.Dockerfile
|
||||
imageName: freeleaps-authentication-web-server
|
||||
imageBuildRoot: '.'
|
||||
imageReleaseArchitectures:
|
||||
- linux/amd64
|
||||
- linux/arm64/v8
|
||||
|
||||
|
||||
|
||||
57
.gitignore
vendored
57
.gitignore
vendored
@ -1,54 +1,7 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
*.so
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# Virtual Environment
|
||||
.idea
|
||||
.vscode
|
||||
__pycache__
|
||||
venv/
|
||||
env/
|
||||
ENV/
|
||||
.env
|
||||
.venv
|
||||
|
||||
# IDE
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
.DS_Store
|
||||
|
||||
# FastAPI
|
||||
.pytest_cache/
|
||||
coverage.xml
|
||||
.coverage
|
||||
htmlcov/
|
||||
|
||||
# Logs
|
||||
*.log
|
||||
logs/
|
||||
|
||||
# Local development
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
|
||||
# Docker
|
||||
docker-compose.override.yml
|
||||
log/
|
||||
webapi/log/
|
||||
39
Dockerfile
Normal file
39
Dockerfile
Normal file
@ -0,0 +1,39 @@
|
||||
FROM python:3.12-slim
|
||||
|
||||
# docker settings
|
||||
ARG CONTAINER_APP_ROOT="/app"
|
||||
ENV APP_NAME="authentication"
|
||||
|
||||
ENV DEVSVC_WEBAPI_URL_BASE="http://devsvc:8007/api/devsvc"
|
||||
ENV NOTIFICATION_WEBAPI_URL_BASE="http://notification:8003/api/notification/"
|
||||
|
||||
ENV JWT_SECRET_KEY="8f87ca8c3c9c3df09a9c78e0adb0927855568f6072d9efc892534aee35f5867b"
|
||||
ENV JWT_ALGORITHM="HS256"
|
||||
|
||||
#site_settings
|
||||
ENV SERVICE_API_ACCESS_HOST=0.0.0.0
|
||||
ENV SERVICE_API_ACCESS_PORT=8004
|
||||
ENV MONGODB_NAME=freeleaps2
|
||||
ENV MONGODB_PORT=27017
|
||||
ENV MONGODB_URI="mongodb://localhost:27017/"
|
||||
|
||||
# Freeleaps Auth Config
|
||||
ENV AUTH_SERVICE_ENDPOINT=""
|
||||
|
||||
#log_settings
|
||||
ENV LOG_BASE_PATH=$CONTAINER_APP_ROOT/log/$APP_NAME
|
||||
ENV BACKEND_LOG_FILE_NAME=$APP_NAME
|
||||
ENV APPLICATION_ACTIVITY_LOG=$APP_NAME-activity
|
||||
|
||||
|
||||
WORKDIR ${CONTAINER_APP_ROOT}
|
||||
COPY requirements.txt .
|
||||
|
||||
RUN pip install --upgrade pip
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . ${CONTAINER_APP_ROOT}
|
||||
|
||||
EXPOSE ${SERVICE_API_ACCESS_PORT}
|
||||
# Use the shell form so environment variables expand, ensuring the actual values are passed to uvicorn
|
||||
# NOTE(review): --reload is a development feature (file watcher + reloader process); drop it for production images
CMD uvicorn webapi.main:app --reload --port=$SERVICE_API_ACCESS_PORT --host=$SERVICE_API_ACCESS_HOST
|
||||
68
README.md
68
README.md
@ -1,68 +0,0 @@
|
||||
# freeleaps-authentication
|
||||
|
||||
This repo create with `FastAPI` and `Vue 3`, powered by `freeleaps.com`.
|
||||
|
||||
> **Please do not delete files under `${PROJECT_ROOT}/.freeleaps`**, these files used to supports DevOps workflow.
|
||||
|
||||
## Project Layout
|
||||
|
||||
```
|
||||
├── .freeleaps/ # Freeleaps configuration
|
||||
│ ├── devops/ # DevOps related configurations
|
||||
│ └── project.yaml # Project configuration file
|
||||
├── app/ # FastAPI backend application
|
||||
│ ├── modules/ # Application modules
|
||||
│ │ ├── sys/ # System related modules
|
||||
│ │ └── __init__.py # Module initialization
|
||||
│ ├── utils/ # Utility functions
|
||||
│ ├── routes.py # API route definitions
|
||||
│ ├── schema.py # Pydantic schemas
|
||||
│ └── setup_app.py # Application setup and configuration
|
||||
├── web/ # Vue 3 frontend application
|
||||
│ ├── src/ # Source code
|
||||
│ │ ├── assets/ # Static assets
|
||||
│ │ ├── components/ # Vue components
|
||||
│ │ ├── router/ # Vue router configuration
|
||||
│ │ ├── stores/ # Pinia stores
|
||||
│ │ ├── views/ # Page views
|
||||
│ │ ├── App.vue # Root Vue component
|
||||
│ │ └── main.ts # Application entry point
|
||||
│ ├── public/ # Public static files
|
||||
│ ├── e2e/ # End-to-end tests
|
||||
│ ├── package.json # Node.js dependencies
|
||||
│ ├── vite.config.ts # Vite configuration
|
||||
│ ├── tsconfig.json # TypeScript configuration
|
||||
│ └── README.md # Frontend specific documentation
|
||||
├── main.py # FastAPI application entry point
|
||||
├── requirements.txt # Python dependencies
|
||||
├── .env.example # Environment variables template
|
||||
├── .gitignore # Git ignore rules
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
### Backend (FastAPI)
|
||||
- **main.py**: Application entry point and server startup
|
||||
- **app/**: Contains all backend application code
|
||||
- **modules/**: Feature-based modules organization
|
||||
- **routes.py**: API endpoint definitions
|
||||
- **schema.py**: Data validation schemas using Pydantic
|
||||
- **setup_app.py**: Application configuration and middleware setup
|
||||
- **utils/**: Shared utility functions
|
||||
|
||||
### Frontend (Vue 3)
|
||||
- **web/**: Complete Vue 3 application with TypeScript
|
||||
- **src/**: Source code with modern Vue 3 composition API
|
||||
- **components/**: Reusable Vue components
|
||||
- **views/**: Page-level components
|
||||
- **router/**: Client-side routing configuration
|
||||
- **stores/**: State management using Pinia
|
||||
- **assets/**: Static assets like images, styles
|
||||
- **Vite**: Fast build tool and development server
|
||||
- **TypeScript**: Type-safe JavaScript development
|
||||
- **ESLint & Prettier**: Code linting and formatting
|
||||
|
||||
### Configuration
|
||||
- **.freeleaps/**: Platform-specific configurations
|
||||
- **.env.example**: Environment variables template
|
||||
- **requirements.txt**: Python package dependencies
|
||||
- **package.json**: Node.js dependencies and scripts
|
||||
@ -1,20 +0,0 @@
|
||||
import pathlib
|
||||
import pkgutil
|
||||
from importlib import import_module
|
||||
from importlib.util import find_spec
|
||||
|
||||
def _modules(postfix="") -> list:
|
||||
"""
|
||||
Get all modules in the current package.
|
||||
"""
|
||||
return [
|
||||
import_module(f".{name}{postfix}", package=__name__)
|
||||
for (_, name, _) in pkgutil.iter_modules([str(pathlib.Path(__file__).parent)])
|
||||
if find_spec(f".{name}{postfix}", package=__name__)
|
||||
]
|
||||
|
||||
def detect_modules() -> list:
|
||||
"""
|
||||
Detect all modules in the current package.
|
||||
"""
|
||||
return _modules(".modules")
|
||||
@ -1,41 +0,0 @@
|
||||
from fastapi import APIRouter, status
|
||||
from starlette.responses import JSONResponse
|
||||
|
||||
from app.utils.config import settings
|
||||
from app.schema import Response
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@router.get('/_/livez', status_code=status.HTTP_200_OK, response_model=Response)
|
||||
async def liveness() -> JSONResponse:
|
||||
"""
|
||||
Liveness check probe endpoint.
|
||||
You can modify the logic here to check the health of your application.
|
||||
But do not modify the response format or remove this endpoint.
|
||||
Its will break the health check of the deployment.
|
||||
"""
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_200_OK,
|
||||
content={
|
||||
'code': status.HTTP_200_OK,
|
||||
'msg': 'ok',
|
||||
'payload': None
|
||||
}
|
||||
)
|
||||
|
||||
@router.get('/_/readyz', status_code=status.HTTP_200_OK, response_model=Response)
|
||||
async def readiness() -> JSONResponse:
|
||||
"""
|
||||
Readiness check probe endpoint.
|
||||
You can modify the logic here to check the health of your application.
|
||||
But do not modify the response format or remove this endpoint.
|
||||
Its will break the health check of the deployment.
|
||||
"""
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_200_OK,
|
||||
content={
|
||||
'code': status.HTTP_200_OK,
|
||||
'msg': 'ok',
|
||||
'payload': None
|
||||
}
|
||||
)
|
||||
@ -1,25 +0,0 @@
|
||||
from fastapi import APIRouter, status
|
||||
|
||||
from app.schema import Response
|
||||
from app.modules.sys.routes import router as sys_router
|
||||
from app.utils.config import settings
|
||||
|
||||
api_router = APIRouter()
|
||||
root_router = APIRouter()
|
||||
|
||||
api_router.include_router(
|
||||
sys_router,
|
||||
tags=["System"],
|
||||
)
|
||||
|
||||
@root_router.get('/', status_code=status.HTTP_200_OK, response_model=Response)
|
||||
def root() -> dict:
|
||||
return {
|
||||
'code': status.HTTP_200_OK,
|
||||
'msg': 'ok',
|
||||
'payload': {
|
||||
'name': settings.PROJECT_NAME,
|
||||
'version': settings.APP_VERSION,
|
||||
'environment': settings.ENV,
|
||||
}
|
||||
}
|
||||
@ -1,8 +0,0 @@
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
class Response(BaseModel):
|
||||
code: int
|
||||
msg: str
|
||||
payload: dict[Any, Any] | None = None
|
||||
@ -1,49 +0,0 @@
|
||||
from fastapi import FastAPI
|
||||
from starlette.middleware.cors import CORSMiddleware
|
||||
|
||||
from app.utils.config import settings
|
||||
from app.utils.logger import logger
|
||||
from app.routes import api_router, root_router
|
||||
|
||||
def setup_routers(app: FastAPI) -> None:
|
||||
# Register root router without prefix to handle root level routes
|
||||
app.include_router(root_router)
|
||||
# Register API router with configured prefix
|
||||
app.include_router(
|
||||
api_router,
|
||||
prefix=settings.API_V1_STR,
|
||||
)
|
||||
|
||||
def setup_cors(app: FastAPI) -> None:
|
||||
origins = []
|
||||
|
||||
if settings.BACKEND_CORS_ORIGINS:
|
||||
origins_raw = settings.BACKEND_CORS_ORIGINS.split(",")
|
||||
|
||||
for origin in origins_raw:
|
||||
use_origin = origin.strip()
|
||||
origins.append(use_origin)
|
||||
|
||||
logger.info(f"Allowed CORS origins: {origins}")
|
||||
|
||||
app.user_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=origins,
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
def create_app() -> FastAPI:
|
||||
app = FastAPI(
|
||||
title=settings.PROJECT_NAME,
|
||||
version=settings.APP_VERSION,
|
||||
docs_url=None if settings.is_production() else "/docs",
|
||||
redoc_url=None if settings.is_production() else "/redoc",
|
||||
openapi_url=f"{settings.API_V1_STR}/openapi.json",
|
||||
)
|
||||
|
||||
setup_routers(app)
|
||||
setup_cors(app)
|
||||
|
||||
return app
|
||||
@ -1,35 +0,0 @@
|
||||
from enum import Enum
|
||||
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
class AppEnvironment(str, Enum):
|
||||
PRODUCTION = "prod"
|
||||
DEVELOPMENT = "dev"
|
||||
TESTING = "test"
|
||||
|
||||
class Config(BaseSettings):
|
||||
|
||||
API_V1_STR: str = "/api/v1"
|
||||
|
||||
APP_VERSION: str = "Unversioned API"
|
||||
ENV: AppEnvironment = AppEnvironment.DEVELOPMENT
|
||||
|
||||
UVICORN_HOST: str = "0.0.0.0"
|
||||
UVICORN_PORT: int = 8888
|
||||
|
||||
BACKEND_CORS_ORIGINS: str = ""
|
||||
|
||||
PROJECT_NAME: str = "freeleaps-authentication"
|
||||
|
||||
LOGGING_LEVEL: str = "INFO"
|
||||
|
||||
def is_development(self) -> bool:
|
||||
return self.ENV == AppEnvironment.DEVELOPMENT
|
||||
|
||||
def is_testing(self) -> bool:
|
||||
return self.ENV == AppEnvironment.TESTING
|
||||
|
||||
def is_production(self) -> bool:
|
||||
return self.ENV == AppEnvironment.PRODUCTION
|
||||
|
||||
settings = Config(_env_file=".env", _env_file_encoding="utf-8")
|
||||
@ -1,8 +0,0 @@
|
||||
import logging
|
||||
|
||||
from app.utils.config import settings
|
||||
|
||||
formatter = "%(levelname)s: %(asctime)s - %(module)s - %(funcName)s - %(message)s"
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logging.basicConfig(level=settings.LOGGING_LEVEL, format=formatter)
|
||||
0
backend/annotation/__init__.py
Normal file
0
backend/annotation/__init__.py
Normal file
117
backend/application/signin_hub.py
Normal file
117
backend/application/signin_hub.py
Normal file
@ -0,0 +1,117 @@
|
||||
from typing import Optional, Tuple, List
|
||||
|
||||
from backend.services.permission.permission_service import PermissionService
|
||||
from backend.services.permission.role_service import RoleService
|
||||
from common.constants.region import UserRegion
|
||||
from common.log.log_utils import log_entry_exit_async
|
||||
from backend.business.signin_manager import SignInManager
|
||||
from backend.models.user.constants import UserLoginAction
|
||||
|
||||
|
||||
class SignInHub:
    """Application-layer facade for sign-in / sign-out flows.

    Every public coroutine is a thin pass-through to the business layer
    (:class:`SignInManager`); no extra logic lives here, so web-layer
    callers depend only on this hub rather than on the business classes.
    """

    def __init__(self) -> None:
        # Business-layer orchestrator that implements the actual flows.
        self.signin_manager = SignInManager()
        # TODO: Dax - Event dispatch and notification center
        # self.notification_center = NotificationCenter(sender_id=settings.SYSTEM_USER_ID)
        # self.event_dispatcher = UserEventDispatcher(owner_id=settings.SYSTEM_USER_ID)

    @log_entry_exit_async
    async def signin_with_email_and_code(
        self, email: str, code: str, host: str, time_zone: Optional[str] = "UTC"
    ) -> Tuple[UserLoginAction, Optional[int], Optional[str], Optional[str], Optional[UserRegion], Optional[List[str]],
               Optional[List[str]]]:
        """
        Interacts with the business layer to handle the sign-in process with email and code.
        Try to signin with email and code.
        Creates a new user account if the email address has never been used before.

        Args:
            email (str): email address
            code (str): auth code to be verified
            host (str): the host address by which the client accesses the frontend service
            time_zone (Optional[str]): time zone of the frontend service

        Returns:
            Tuple of:
                - UserLoginAction: next action for the client
                - Optional[int]: user role
                - Optional[str]: user_id
                - Optional[str]: flid
                - Optional[UserRegion]: region
                - Optional[List[str]]: user role names
                - Optional[List[str]]: user permission keys
        """
        return await self.signin_manager.signin_with_email_and_code(
            email=email, code=code, host=host, time_zone=time_zone
        )

    @log_entry_exit_async
    async def signin_with_email_and_password(
        self, email: str, password: str
    ) -> Tuple[UserLoginAction, Optional[int], Optional[str], Optional[str], Optional[List[str]], Optional[List[str]]]:
        """Try to signin with email and password.

        Args:
            email (str): email address
            password (str): password to be verified

        Returns:
            Tuple of:
                - UserLoginAction: next action for the client
                - Optional[int]: user role
                - Optional[str]: user_id
                - Optional[str]: flid
                - Optional[List[str]]: user role names
                - Optional[List[str]]: user permission keys
        """
        return await self.signin_manager.signin_with_email_and_password(
            email=email, password=password
        )

    @log_entry_exit_async
    async def update_new_user_flid(
        self, user_id: str, user_flid: str
    ) -> Tuple[UserLoginAction, Optional[str]]:
        """Attempt to assign *user_flid* to *user_id*; delegates to the business layer."""
        return await self.signin_manager.update_new_user_flid(
            user_id=user_id, user_flid=user_flid
        )

    @log_entry_exit_async
    async def try_signin_with_email(self, email: str, host: str) -> UserLoginAction:
        """Start an email sign-in: may trigger an auth-code email for new users."""
        return await self.signin_manager.try_signin_with_email(email=email, host=host)

    @log_entry_exit_async
    async def try_magicleaps_signin_with_email(self, email: str, host: str) -> UserLoginAction:
        """Same as :meth:`try_signin_with_email` but with MagicLeaps-branded notifications."""
        return await self.signin_manager.try_magicleaps_signin_with_email(email=email, host=host)

    @log_entry_exit_async
    async def reset_password_through_email(self, email: str, host: str) -> int:
        """Begin a password-reset flow for *email*; returns a UserLoginAction value."""
        return await self.signin_manager.reset_password_through_email(
            email=email, host=host
        )

    # NOTE(review): `dict[str, any]` below uses the builtin function `any`, not
    # `typing.Any` — it evaluates at runtime but is semantically wrong; should
    # be `dict[str, Any]` once `Any` is imported. TODO confirm and fix.
    @log_entry_exit_async
    async def update_user_password(self, user_id: str, password: str) -> dict[str, any]:
        """Set a new password for *user_id* (with code-depot sync in the business layer)."""
        return await self.signin_manager.update_user_password(
            user_id=user_id, password=password
        )

    @log_entry_exit_async
    async def update_user_password_no_depot(self, user_id: str, password: str) -> dict[str, any]:
        """Set a new password for *user_id* without touching the code depot."""
        return await self.signin_manager.update_user_password_no_depot(
            user_id=user_id, password=password
        )

    @log_entry_exit_async
    async def send_email_code(self, sender_id: str, email: str) -> dict[str, any]:
        """Send an auth code to *email*; returns {"succeeded": bool}."""
        result = await self.signin_manager.send_email_code(sender_id, email)
        return {"succeeded": result}

    @log_entry_exit_async
    async def send_mobile_code(self, sender_id: str, mobile: str) -> dict[str, any]:
        """Send an auth code to *mobile*; returns {"succeeded": bool}."""
        result = await self.signin_manager.send_mobile_code(sender_id, mobile)
        return {"succeeded": result}

    @log_entry_exit_async
    async def sign_out(self, identity: str) -> bool:
        """Sign the given identity out. Currently a stub that always succeeds."""
        # TODO: to be implemented
        return True
|
||||
414
backend/business/signin_manager.py
Normal file
414
backend/business/signin_manager.py
Normal file
@ -0,0 +1,414 @@
|
||||
import random
|
||||
from typing import Tuple, Optional, List
|
||||
|
||||
|
||||
from backend.services.auth.user_auth_service import UserAuthService
|
||||
from common.constants.region import UserRegion
|
||||
from common.utils.region import RegionHandler
|
||||
from backend.models.user.constants import (
|
||||
NewUserMethod,
|
||||
)
|
||||
from backend.models.user.constants import UserLoginAction
|
||||
from backend.services.user.user_management_service import (
|
||||
UserManagementService,
|
||||
)
|
||||
from backend.services.code_depot.code_depot_service import (
|
||||
CodeDepotService,
|
||||
)
|
||||
from common.log.module_logger import ModuleLogger
|
||||
from common.utils.string import check_password_complexity
|
||||
from common.exception.exceptions import InvalidDataError
|
||||
from backend.services.notification.notification_service import (
|
||||
NotificationService,
|
||||
)
|
||||
from backend.models.user.constants import (
|
||||
AuthType,
|
||||
)
|
||||
from common.config.app_settings import app_settings
|
||||
|
||||
|
||||
class SignInManager:
|
||||
def __init__(self):
    """Wire up the services the sign-in flows depend on."""
    # Credential / auth-code verification and flid bookkeeping.
    self.user_auth_service = UserAuthService()
    # Maps a request host to a UserRegion.
    self.region_handler = RegionHandler()
    # Account creation and role/permission lookups.
    self.user_management_service = UserManagementService()
    # NOTE(review): sender_id receives the class object itself, not an
    # instance or a name string — presumably used as a log-source tag;
    # TODO confirm ModuleLogger accepts a class here.
    self.module_logger = ModuleLogger(sender_id=SignInManager)
    # Creates matching users in the code depot (e.g. Git hosting).
    self.code_depot_service = CodeDepotService()
    # Sends auth-code emails / mobile messages.
    self.notification_service = NotificationService()
|
||||
|
||||
async def signin_with_email_and_code(
    self, email: str, code: str, host: str, time_zone: Optional[str] = "UTC"
) -> Tuple[UserLoginAction, Optional[int], Optional[str], Optional[str], Optional[UserRegion], Optional[List[str]], Optional[List[str]]]:
    """Try to signin with email and code.

    Creates a new user account if the email address has never been used before.

    Args:
        email (str): email address
        code (str): auth code to be verified
        host (str): the host address by which the client accesses the frontend service, for detecting UserRegion
        time_zone (str, optional): timezone of the frontend service

    Returns:
        Tuple of:
            - UserLoginAction: next action for the client
            - Optional[int]: user role
            - Optional[str]: user_id
            - Optional[str]: flid
            - Optional[UserRegion]: region
            - Optional[List[str]]: user role names
            - Optional[List[str]]: user permission keys

    Note:
        The annotation above follows the actual return order
        (flid before region), matching every return statement below and
        the corresponding SignInHub signature.
    """
    # check if the user account exists
    user_id = await self.user_auth_service.get_user_id_by_email(email)

    # if it cannot find a user account for the email address, it is a new user
    is_new_user = user_id is None
    preferred_region = self.region_handler.detect_from_host(host)

    # verify the email through the auth code
    if await self.user_auth_service.verify_email_with_code(email, code):
        if is_new_user:
            user_account = (
                await self.user_management_service.create_new_user_account(
                    method=NewUserMethod.EMAIL, region=preferred_region
                )
            )
            user_id = str(user_account.id)
            await self.user_management_service.initialize_new_user_data(
                user_id=str(user_account.id),
                method=NewUserMethod.EMAIL,
                email_address=email,
                region=preferred_region,
                time_zone=time_zone,
            )

        # Re-fetch the account (covers both new and existing users).
        user_account = await self.user_management_service.get_account_by_id(
            user_id=user_id
        )
        role_names, permission_keys = await self.user_management_service.get_role_and_permission_by_user_id(
            user_id=user_id
        )
        if await self.user_auth_service.is_flid_reset_required(user_id):
            # Suggest the email local part as the initial flid candidate.
            return (
                UserLoginAction.REVIEW_AND_REVISE_FLID,
                user_account.user_role,
                user_id,
                email.split("@")[0],
                preferred_region,
                role_names,
                permission_keys,
            )

        user_flid = await self.user_auth_service.get_user_flid(user_id)
        if await self.user_auth_service.is_password_reset_required(user_id):
            return (
                UserLoginAction.NEW_USER_SET_PASSWORD,
                user_account.user_role,
                user_id,
                user_flid,
                preferred_region,
                role_names,
                permission_keys,
            )
        return (
            UserLoginAction.EXISTING_USER_PASSWORD_REQUIRED,
            user_account.user_role,
            user_id,
            user_flid,
            preferred_region,
            role_names,
            permission_keys,
        )
    else:
        await self.module_logger.log_warning(
            warning="The auth code is invalid.",
            properties={"email": email, "code": code},
        )
        # TODO refactor this to reduce None
        return UserLoginAction.VERIFY_EMAIL_WITH_AUTH_CODE, None, None, None, None, None, None
|
||||
|
||||
async def signin_with_email_and_password(
    self, email: str, password: str
) -> Tuple[UserLoginAction, Optional[int], Optional[str], Optional[str], Optional[List[str]], Optional[List[str]]]:
    """Try to signin with email and password.

    Args:
        email (str): email address
        password (str): password to be verified

    Returns:
        Tuple of:
            - UserLoginAction: next action for the client
            - Optional[int]: user role
            - Optional[str]: user_id
            - Optional[str]: flid
            - Optional[List[str]]: user role names
            - Optional[List[str]]: user permission keys
    """
    # check if the user account exists
    user_id = await self.user_auth_service.get_user_id_by_email(email)

    # if it cannot find a user account for the email address, it is a new user
    is_new_user = user_id is None

    if is_new_user:
        # cannot find the email address — fall back to the code flow
        # TODO refactor this to reduce None
        return (UserLoginAction.VERIFY_EMAIL_WITH_AUTH_CODE, None, None, None, None, None)
    else:
        if await self.user_auth_service.is_password_reset_required(user_id):
            # password hasn't been set before, ask the user to set one
            # NOTE(review): this branch returns all Nones, so the caller
            # does not learn which user must set the password — TODO confirm
            # this is intentional.
            return (
                UserLoginAction.NEW_USER_SET_PASSWORD,
                None,
                None,
                None,
                None,
                None
            )
        else:
            if await self.user_auth_service.verify_user_with_password(
                user_id, password
            ):
                user_account = await self.user_management_service.get_account_by_id(
                    user_id=user_id
                )
                role_names, permission_keys = await self.user_management_service.get_role_and_permission_by_user_id(user_id)
                if await self.user_auth_service.is_flid_reset_required(user_id):
                    # Suggest the email local part as the initial flid candidate.
                    return (
                        UserLoginAction.REVIEW_AND_REVISE_FLID,
                        user_account.user_role,
                        user_id,
                        email.split("@")[0],
                        role_names,
                        permission_keys,
                    )

                user_flid = await self.user_auth_service.get_user_flid(user_id)

                # password verification passed
                return (
                    UserLoginAction.USER_SIGNED_IN,
                    user_account.user_role,
                    user_id,
                    user_flid,
                    role_names,
                    permission_keys
                )
            else:
                # ask user to input password again.
                # TODO: we need to limit times of user to input the wrong password
                # TODO refactor this to reduce None
                return (
                    UserLoginAction.EXISTING_USER_PASSWORD_REQUIRED,
                    None,
                    None,
                    None,
                    None,
                    None
                )
|
||||
|
||||
async def update_new_user_flid(
    self, user_id: str, user_flid: str
) -> Tuple[UserLoginAction, Optional[str]]:
    """Try to assign *user_flid* to *user_id* and provision the code depot.

    Args:
        user_id (str): id of the user claiming the flid
        user_flid (str): desired flid

    Returns:
        Tuple of:
            - UserLoginAction: REVIEW_AND_REVISE_FLID when the flid cannot be
              used (taken, or depot provisioning failed), otherwise the next
              sign-in step
            - Optional[str]: the accepted flid, or a suggested alternative
              (flid + random 3-digit suffix) when revision is required
    """
    if await self.user_auth_service.is_flid_available(user_flid):

        code_depot_email = "{}@freeleaps.com".format(user_flid)
        result = await self.code_depot_service.create_depot_user(
            user_flid, user_id, code_depot_email
        )

        if not result:
            await self.module_logger.log_error(
                error="Failed to create depot user for {} with flid {} and email {}".format(
                    user_id, user_flid, code_depot_email
                ),
                properties={
                    "user_id": user_id,
                    "user_flid": user_flid,
                    "code_depot_email": code_depot_email,
                },
            )
            # Depot provisioning failed: ask the user to pick another flid
            # and suggest one with a random 3-digit suffix.
            return (
                UserLoginAction.REVIEW_AND_REVISE_FLID,
                "{}{}".format(user_flid, random.randint(100, 999)),
            )
        await self.user_auth_service.update_flid(user_id, user_flid)
        if await self.user_auth_service.is_password_reset_required(user_id):
            return (
                UserLoginAction.NEW_USER_SET_PASSWORD,
                user_flid,
            )
        else:
            return (
                UserLoginAction.EXISTING_USER_PASSWORD_REQUIRED,
                user_flid,
            )
    else:
        # Flid already taken: suggest an alternative with a random suffix.
        return (
            UserLoginAction.REVIEW_AND_REVISE_FLID,
            "{}{}".format(user_flid, random.randint(100, 999)),
        )
|
||||
|
||||
async def try_signin_with_email(self, email: str, host: str) -> UserLoginAction:
    """Start an email sign-in: send an auth code when the account is new
    or has no password yet, otherwise request the existing password.

    Args:
        email (str): email address
        host (str): host url that user tried to sign in

    Returns:
        int: UserLoginAction
    """
    user_id = await self.user_auth_service.get_user_id_by_email(email)

    # An existing account only needs the code flow when its password
    # has never been set.
    needs_password_setup = False
    if user_id:
        needs_password_setup = (
            await self.user_auth_service.is_password_reset_required(user_id)
        )

    if user_id is not None and not needs_password_setup:
        # Known account with a password: ask for it directly.
        return UserLoginAction.EXISTING_USER_PASSWORD_REQUIRED

    # New email address, or existing account without a password:
    # generate an auth code and deliver it by email.
    mail_code = await self.user_auth_service.generate_auth_code_for_object(
        email, AuthType.EMAIL
    )
    await self.notification_service.send_notification(
        sender_id=app_settings.SYSTEM_USER_ID,
        channels=["2"],  # 2 maps to email in NotificationChannel
        receiver_id=email,
        subject="email",
        event="authentication",
        properties={"auth_code": mail_code},
        # TODO: reconsider necessity of adding region info here
        # region=RegionHandler().detect_from_host(host),
    )
    return UserLoginAction.VERIFY_EMAIL_WITH_AUTH_CODE
|
||||
|
||||
    async def try_magicleaps_signin_with_email(self, email: str, host: str) -> UserLoginAction:
        """Try to sign in through email using MagicLeaps branding.

        Identical flow to ``try_signin_with_email`` except the notification is
        raised with the ``magicleaps_authentication`` event so the templating
        layer can use the MagicLeaps branding.

        Args:
            email (str): email address
            host (str): host url that user tried to sign in
                (currently unused — see the commented-out region detection below)

        Returns:
            UserLoginAction: next action the client should take
        """
        user_id = await self.user_auth_service.get_user_id_by_email(email)

        is_password_reset_required = False
        if user_id:
            is_password_reset_required = (
                await self.user_auth_service.is_password_reset_required(user_id)
            )

        if user_id is None or is_password_reset_required:
            # send auth code through email if the email address
            # hasn't been associated with any account,
            # or if the user's password is empty, which means the user's
            # password hasn't been set yet.

            mail_code = await self.user_auth_service.generate_auth_code_for_object(
                email, AuthType.EMAIL
            )
            await self.notification_service.send_notification(
                sender_id=app_settings.SYSTEM_USER_ID,
                channels=["2"],  # 2 maps to email in NotificationChannel
                receiver_id=email,
                subject="email",
                event="magicleaps_authentication",  # Use the new event type
                properties={"auth_code": mail_code},
                # TODO: reconsider necessity of adding region info here
                # region=RegionHandler().detect_from_host(host),
            )
            return UserLoginAction.VERIFY_EMAIL_WITH_AUTH_CODE
        else:
            return UserLoginAction.EXISTING_USER_PASSWORD_REQUIRED
||||
    async def reset_password_through_email(self, email: str, host: str) -> int:
        """Verify the email exists, clear the existing password,
        generate an auth code and send it to the email address
        so in the following steps the user can reset their password.

        NOTE(review): the auth code is sent *before* the password is cleared;
        if ``reset_password`` raises, the user receives a code but the old
        password is still in effect — confirm this ordering is intended.

        Args:
            email (str): email address
            host (str): host that user will perform the reset on (currently unused)

        Returns:
            int: UserLoginAction
        """

        user_id = await self.user_auth_service.get_user_id_by_email(email)
        if user_id is not None:
            # send auth code through email only when the email address
            # has been associated with an account.
            mail_code = await self.user_auth_service.generate_auth_code_for_object(
                email, AuthType.EMAIL
            )
            await self.notification_service.send_notification(
                sender_id=app_settings.SYSTEM_USER_ID,
                channels=["2"],  # 2 maps to email in NotificationChannel
                receiver_id=email,
                subject="email",
                event="authentication",
                properties={"auth_code": mail_code},
            )

            # Clear the stored password so the follow-up flow can set a new one.
            await self.user_auth_service.reset_password(user_id)

            return UserLoginAction.VERIFY_EMAIL_WITH_AUTH_CODE
        else:
            return UserLoginAction.EMAIL_NOT_ASSOCIATED_WITH_USER
||||
    # NOTE(review): the annotation ``dict[str, any]`` uses the builtin ``any``
    # function, not ``typing.Any`` — harmless at runtime but wrong for type
    # checkers; confirm ``Any`` is imported before changing it.
    async def update_user_password(self, user_id: str, password: str) -> dict[str, any]:
        """Validate the password's complexity and persist it for the user.

        Also propagates the new password to the code depot (via
        ``save_password_auth_method``).

        Args:
            user_id (str): user identity
            password (str): new clear-text password

        Returns:
            dict: ``{"succeeded": True}`` on success

        Raises:
            InvalidDataError: if the password fails the complexity check
        """
        error_message = """
        Password does not pass complexity requirements:
        - At least one lowercase character
        - At least one uppercase character
        - At least one digit
        - At least one special character (punctuation, brackets, quotes, etc.)
        """
        if not check_password_complexity(password):
            raise InvalidDataError(error_message)

        user_flid = await self.user_auth_service.get_user_flid(user_id)
        await self.user_auth_service.save_password_auth_method(
            user_id, user_flid, password
        )
        return {"succeeded": True}
|
||||
    # NOTE(review): the annotation ``dict[str, any]`` uses the builtin ``any``
    # function, not ``typing.Any`` — harmless at runtime but wrong for type
    # checkers; confirm ``Any`` is imported before changing it.
    async def update_user_password_no_depot(self, user_id: str, password: str) -> dict[str, any]:
        """Validate the password's complexity and persist it for the user,
        WITHOUT propagating the change to the code depot.

        Args:
            user_id (str): user identity
            password (str): new clear-text password

        Returns:
            dict: ``{"succeeded": True}`` on success

        Raises:
            InvalidDataError: if the password fails the complexity check
        """
        error_message = """
        Password does not pass complexity requirements:
        - At least one lowercase character
        - At least one uppercase character
        - At least one digit
        - At least one special character (punctuation, brackets, quotes, etc.)
        """
        if not check_password_complexity(password):
            raise InvalidDataError(error_message)

        user_flid = await self.user_auth_service.get_user_flid(user_id)
        await self.user_auth_service.save_password_auth_method_no_depot(
            user_id, user_flid, password
        )
        return {"succeeded": True}
|
||||
    async def send_email_code(self, sender_id: str, email: str) -> bool:
        """Generate an auth code for the email address and send it.

        NOTE(review): other call sites in this class pass ``channels=["2"]``
        ("2 maps to email in NotificationChannel") while this one passes
        ``channels=["email"]`` — confirm the notification service accepts both
        forms, otherwise one of them is wrong.

        Args:
            sender_id (str): id of the sending (system or acting) user
            email (str): destination email address

        Returns:
            bool: result reported by the notification service
        """
        mail_code = await self.user_auth_service.generate_auth_code_for_object(
            email, AuthType.EMAIL
        )
        success = await self.notification_service.send_notification(
            sender_id=sender_id,
            channels=["email"],
            receiver_id=email,
            subject="email",
            event="authentication",
            properties={"auth_code": mail_code},
        )
        return success
|
||||
    async def send_mobile_code(self, sender_id: str, mobile: str) -> bool:
        """Generate an auth code for the mobile number and send it.

        NOTE(review): this sends the *mobile* code through ``channels=["email"]``
        and names the code ``mail_code`` — presumably copied from
        ``send_email_code``; verify an SMS channel is not intended here.

        Args:
            sender_id (str): id of the sending (system or acting) user
            mobile (str): destination mobile number

        Returns:
            bool: result reported by the notification service
        """
        mail_code = await self.user_auth_service.generate_auth_code_for_object(
            mobile, AuthType.MOBILE
        )
        success = await self.notification_service.send_notification(
            sender_id=sender_id,
            channels=["email"],
            receiver_id=mobile,
            subject="mobile",
            event="authentication",
            properties={"auth_code": mail_code},
        )
        return success
||||
0
backend/infra/__init__.py
Normal file
0
backend/infra/__init__.py
Normal file
52
backend/infra/api_key_introspect_handler.py
Normal file
52
backend/infra/api_key_introspect_handler.py
Normal file
@ -0,0 +1,52 @@
|
||||
from typing import Dict, Any
|
||||
import httpx
|
||||
from fastapi import HTTPException
|
||||
from common.config.app_settings import app_settings
|
||||
from common.log.log_utils import log_entry_exit_async
|
||||
from common.log.module_logger import ModuleLogger
|
||||
|
||||
|
||||
class ApiKeyIntrospectHandler:
    """
    Freeleaps Auth Service API Key Introspect Handler.

    Validates API keys by POSTing them to the external auth service's
    introspection endpoint.
    """

    def __init__(self) -> None:
        self.module_logger = ModuleLogger(sender_id=ApiKeyIntrospectHandler.__name__)
        # NOTE(review): assumes AUTH_SERVICE_ENDPOINT ends with "/" — the URL
        # below is built by plain concatenation; confirm the setting's format.
        self.auth_service_base = app_settings.AUTH_SERVICE_ENDPOINT

    @log_entry_exit_async
    async def api_key_introspect(self, api_key: str) -> Dict[str, Any]:
        """
        Introspect API key by calling external auth service

        Args:
            api_key: The API key to introspect

        Returns:
            Dictionary containing the API key details

        Raises:
            HTTPException: If the external service call fails
        """
        api_url = self.auth_service_base + "keys/introspect_api_key"
        # Do not log the key itself — only the fact that validation started.
        await self.module_logger.log_info("Starting API Key validation for key")

        # NOTE(review): relies on httpx's default timeout; consider an explicit
        # timeout if the auth service can be slow.
        async with httpx.AsyncClient() as client:
            response = await client.post(
                api_url,
                json={"api_key": api_key}
            )

            if response.status_code != 200:
                # Surface the upstream error body (if any) to the caller.
                error_detail = response.json() if response.content else {"error": "Unknown error"}
                await self.module_logger.log_error(f"API Key validation failed - Status: {response.status_code}, Error: {error_detail}")
                raise HTTPException(
                    status_code=response.status_code,
                    detail=error_detail
                )

            validation_result = response.json()
            await self.module_logger.log_info(f"API Key validation successful - Active: {validation_result.get('active', False)}")
            return validation_result
||||
355
backend/infra/auth/user_auth_handler.py
Normal file
355
backend/infra/auth/user_auth_handler.py
Normal file
@ -0,0 +1,355 @@
|
||||
import bcrypt
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional
|
||||
|
||||
from common.utils.string import generate_auth_code
|
||||
from backend.services.code_depot.code_depot_service import (
|
||||
CodeDepotService,
|
||||
)
|
||||
from backend.models.user.constants import (
|
||||
AuthType,
|
||||
)
|
||||
from backend.models.user.models import (
|
||||
AuthCodeDoc,
|
||||
UserEmailDoc,
|
||||
UserMobileDoc,
|
||||
UserPasswordDoc,
|
||||
)
|
||||
|
||||
from backend.models.user_profile.models import BasicProfileDoc
|
||||
|
||||
|
||||
class UserAuthHandler:
    """Persistence-layer handler for user authentication data.

    Wraps the Beanie documents (password, email, mobile, auth code, basic
    profile) and mirrors password changes into the code depot service.
    """

    def __init__(self) -> None:
        self.code_depot_service = CodeDepotService()

    async def verify_user_with_password(self, user_id: str, password: str) -> bool:
        """Verify user's password

        Args:
            user_id (str): user identity, _id in UserAccountDoc
            password (str): password user provided, clear text

        Returns:
            bool: True if password is correct, else return False
        """

        user_password = await UserPasswordDoc.find(
            UserPasswordDoc.user_id == user_id
        ).first_or_none()

        if user_password:
            # password was reset to an empty string, cannot be verified
            if user_password.password == "":
                return False

            if bcrypt.checkpw(
                password.encode("utf-8"), user_password.password.encode("utf-8")
            ):
                return True
            else:
                return False
        else:
            return False

    async def get_user_password(self, user_id: str) -> Optional[str]:
        """Get user password through the user_id

        Args:
            user_id (str): user identity, _id in UserAccountDoc

        Returns:
            Optional[str]: password hash, or None if no password record exists
        """

        user_password = await UserPasswordDoc.find(
            UserPasswordDoc.user_id == user_id
        ).first_or_none()

        if user_password is None:
            return None
        else:
            return user_password.password

    async def get_user_email(self, user_id: str) -> Optional[str]:
        """get user email through the user_id

        Args:
            user_id (str): user identity, _id in UserAccountDoc

        Returns:
            Optional[str]: email address, or None if none is stored
        """
        user_email = await UserEmailDoc.find(
            UserEmailDoc.user_id == user_id
        ).first_or_none()

        if user_email is None:
            return None
        else:
            return user_email.email

    async def get_user_id_by_email(self, email: str) -> Optional[str]:
        """get user id through email from user_email doc

        Args:
            email (str): email address, compared in lowercase

        Returns:
            Optional[str]: user_id or None
        """
        user_email = await UserEmailDoc.find(
            UserEmailDoc.email == email.lower()
        ).first_or_none()

        if user_email is None:
            return None
        else:
            return user_email.user_id

    def user_sign_out(self, token):
        # TODO: not implemented — sign-out/token invalidation is a no-op.
        pass

    async def verify_email_code(self, email: str, code: str) -> bool:
        """sign in with email and code

        Args:
            email (str): email address
            code (str): auth code to be verified

        Returns:
            bool: True if a matching, unexpired code exists, False otherwise
        """
        result = await AuthCodeDoc.find(
            AuthCodeDoc.method == email.lower(),
            AuthCodeDoc.auth_code == code,
            AuthCodeDoc.expiry > datetime.now(timezone.utc),
            AuthCodeDoc.method_type == AuthType.EMAIL,
        ).first_or_none()

        if result:
            return True
        else:
            return False

    async def get_user_mobile(self, user_id: str) -> Optional[str]:
        """get user mobile number through the user_id

        Args:
            user_id (str): user identity, _id in UserAccountDoc

        Returns:
            Optional[str]: mobile number, or None if none is stored
        """
        user_mobile = await UserMobileDoc.find(
            UserMobileDoc.user_id == user_id
        ).first_or_none()

        if user_mobile is None:
            return None
        else:
            return user_mobile.mobile

    async def generate_auth_code_for_mobile(self, mobile: str) -> str:
        """Create and persist an auth code for a mobile number.

        The code expires after 5 minutes.

        Args:
            mobile (str): mobile number

        Returns:
            str: the generated auth code
        """
        auth_code = generate_auth_code()
        expiry = datetime.now(timezone.utc) + timedelta(minutes=5)
        auth_code_doc = AuthCodeDoc(
            auth_code=auth_code,
            method=mobile.lower(),
            method_type=AuthType.MOBILE,
            expiry=expiry,
        )

        await auth_code_doc.create()
        return auth_code

    async def verify_mobile_with_code(self, mobile, code):
        """sign in with mobile and code

        Args:
            mobile (str): mobile number
            code (str): auth code to be verified

        Returns:
            bool: True if a matching, unexpired code exists, False otherwise
        """
        result = await AuthCodeDoc.find(
            AuthCodeDoc.method == mobile.lower(),
            AuthCodeDoc.auth_code == code,
            AuthCodeDoc.expiry > datetime.now(timezone.utc),
            AuthCodeDoc.method_type == AuthType.MOBILE,
        ).first_or_none()

        if result:
            return True
        else:
            return False

    async def save_email_auth_method(self, user_id: str, email: str):
        """save email auth method to user_email doc

        Creates the document if missing, otherwise overwrites the stored email.

        Args:
            user_id (str): user id
            email (str): email address (stored lowercased)
        """
        user_email = await UserEmailDoc.find(
            UserEmailDoc.user_id == user_id
        ).first_or_none()

        if user_email is None:
            new_user_email = UserEmailDoc(user_id=user_id, email=email.lower())
            await new_user_email.create()
        else:
            user_email.email = email.lower()
            await user_email.save()

    async def save_password_auth_method(self, user_id: str, user_flid, password: str):
        """save password auth method to user_password doc and mirror it
        into the code depot.

        Args:
            user_id (str): user id
            user_flid: user's FLID, used as the depot identity
            password (str): user password, clear text

        Raises:
            Exception: if the code depot update fails
        """
        # bcrypt.hashpw returns bytes; decode so the stored value is a str,
        # consistent with reset_password (stores "") and
        # verify_user_with_password (calls .encode("utf-8") on the stored value).
        password_hashed = bcrypt.hashpw(
            password.encode("utf-8"), bcrypt.gensalt()
        ).decode("utf-8")

        user_password = await UserPasswordDoc.find(
            UserPasswordDoc.user_id == user_id
        ).first_or_none()

        if user_password is None:
            new_user_password = UserPasswordDoc(
                user_id=user_id, password=password_hashed
            )
            await new_user_password.create()
        else:
            user_password.password = password_hashed
            await user_password.save()

        result = await self.code_depot_service.update_depot_user_password(
            user_flid, password
        )
        if not result:
            raise Exception("Failed to update user password in code depot")

    async def save_password_auth_method_no_depot(self, user_id: str, user_flid, password: str):
        """save password auth method to user_password doc without updating depot service

        Args:
            user_id (str): user id
            user_flid: user's FLID (unused here; kept for signature parity with
                save_password_auth_method)
            password (str): user password, clear text
        """
        # Same str-vs-bytes consideration as save_password_auth_method.
        password_hashed = bcrypt.hashpw(
            password.encode("utf-8"), bcrypt.gensalt()
        ).decode("utf-8")

        user_password = await UserPasswordDoc.find(
            UserPasswordDoc.user_id == user_id
        ).first_or_none()

        if user_password is None:
            new_user_password = UserPasswordDoc(
                user_id=user_id, password=password_hashed
            )
            await new_user_password.create()
        else:
            user_password.password = password_hashed
            await user_password.save()

        # Skip depot service call - users don't exist in Gitea, so we don't update depot password

    async def reset_password(self, user_id: str):
        """clean password auth method from user_password doc

        Clearing to "" marks the account as "password reset required".

        Args:
            user_id (str): user id

        Raises:
            Exception: if the user never had a password record
        """
        user_password = await UserPasswordDoc.find(
            UserPasswordDoc.user_id == user_id
        ).first_or_none()

        if user_password:
            user_password.password = ""
            await user_password.save()
        else:
            raise Exception("User password was not set before.")

    async def is_password_reset_required(self, user_id: str) -> bool:
        """check if a password reset is required for the user

        Args:
            user_id (str): user id

        Returns:
            bool: True if the password is empty or no record exists
        """
        user_password = await UserPasswordDoc.find(
            UserPasswordDoc.user_id == user_id
        ).first_or_none()

        if user_password:
            return user_password.password == ""
        else:
            return True

    async def is_flid_reset_required(self, user_id: str) -> bool:
        """True when the FLID has never been changed since creation."""
        basic_profile = await BasicProfileDoc.find_one(
            BasicProfileDoc.user_id == user_id
        )

        if basic_profile:
            return basic_profile.FLID.update_time == basic_profile.FLID.create_time
        # Previously fell through and returned None (falsy) when no profile
        # exists; return False explicitly to honour the declared bool type.
        return False

    async def is_flid_available(self, user_flid: str) -> bool:
        """True when no profile currently uses the given FLID."""
        basic_profile = await BasicProfileDoc.find_one(
            BasicProfileDoc.FLID.identity == user_flid
        )

        if basic_profile:
            return False
        else:
            return True

    async def get_flid(self, user_id: str) -> Optional[str]:
        """Return the user's FLID, or None if the user has no profile."""
        basic_profile = await BasicProfileDoc.find_one(
            BasicProfileDoc.user_id == user_id
        )

        if basic_profile:
            return basic_profile.FLID.identity
        else:
            return None

    async def update_flid(self, user_id: str, flid: str) -> bool:
        """Set a new FLID on the user's profile.

        Returns:
            bool: True on success, False if the user has no profile
        """
        basic_profile = await BasicProfileDoc.find_one(
            BasicProfileDoc.user_id == user_id
        )

        if basic_profile:
            basic_profile.FLID.identity = flid
            basic_profile.FLID.update_time = datetime.now(timezone.utc)
            basic_profile.FLID.set_by = user_id
            await basic_profile.save()
            return True
        else:
            return False

    async def generate_auth_code(self, deliver_object: str, auth_type: AuthType) -> str:
        """Create and persist an auth code for a delivery target.

        The code expires after 5 minutes.

        Args:
            deliver_object (str): email address, mobile, etc
            auth_type (AuthType): authentication type

        Returns:
            str: the generated auth code
        """
        auth_code = generate_auth_code()
        expiry = datetime.now(timezone.utc) + timedelta(minutes=5)
        auth_code_doc = AuthCodeDoc(
            auth_code=auth_code,
            method=deliver_object.lower(),
            method_type=auth_type,
            expiry=expiry,
        )

        await auth_code_doc.create()
        return auth_code
||||
0
backend/infra/permission/__init__.py
Normal file
0
backend/infra/permission/__init__.py
Normal file
179
backend/infra/permission/permission_handler.py
Normal file
179
backend/infra/permission/permission_handler.py
Normal file
@ -0,0 +1,179 @@
|
||||
from typing import Optional, List, Tuple
|
||||
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
|
||||
from backend.models.permission.models import PermissionDoc, RoleDoc
|
||||
from bson import ObjectId
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class PermissionHandler:
    """CRUD handler for PermissionDoc with uniqueness enforcement."""

    def __init__(self):
        pass

    async def create_permission(self, permission_key: str, permission_name: str,
                                description: Optional[str] = None) -> Optional[PermissionDoc]:
        """Create a new permission document"""
        if not permission_key or not permission_name:
            raise RequestValidationError("permission_key and permission_name are required.")
        # Reject if either the key or the name is already taken.
        if await PermissionDoc.find_one(
                {str(PermissionDoc.permission_key): permission_key}) or await PermissionDoc.find_one(
            {str(PermissionDoc.permission_name): permission_name}):
            raise RequestValidationError("permission has already been created.")
        doc = PermissionDoc(
            permission_key=permission_key,
            permission_name=permission_name,
            description=description,
            created_at=datetime.now(),
            updated_at=datetime.now()
        )
        await doc.create()
        return doc

    async def update_permission(self, permission_id: str, permission_key: Optional[str] = None,
                                permission_name: Optional[str] = None, description: Optional[str] = None) -> Optional[
        PermissionDoc]:
        """Update an existing permission document by id, ensuring permission_key is unique"""
        if not permission_id or not permission_key or not permission_name:
            raise RequestValidationError("permission_id, permission_key and permission_name is required.")
        doc = await PermissionDoc.get(permission_id)
        if not doc:
            raise RequestValidationError("Permission not found.")
        if doc.is_default:
            raise RequestValidationError("Default permission cannot be updated.")
        # Check for uniqueness (exclude self). _id is stored as an ObjectId,
        # so the raw string id would never match and the document being
        # updated would count as its own conflict, rejecting every no-op
        # rename — convert to ObjectId before comparing.
        conflict = await PermissionDoc.find_one({
            "$and": [
                {"_id": {"$ne": ObjectId(permission_id)}},
                {"$or": [
                    {str(PermissionDoc.permission_key): permission_key},
                    {str(PermissionDoc.permission_name): permission_name}
                ]}
            ]
        })
        if conflict:
            raise RequestValidationError("Permission name or permission key already exists.")
        doc.permission_key = permission_key
        doc.permission_name = permission_name
        doc.description = description
        doc.updated_at = datetime.now()

        await doc.save()
        return doc

    async def create_or_update_permission(self, permission_key: str, permission_name: str, custom_permission_id: Optional[str], description: Optional[str] = None) -> Optional[PermissionDoc]:
        """Create or update a permission document"""
        # Input validation
        if not permission_key or not permission_name:
            raise RequestValidationError("permission_key and permission_name are required.")

        def create_new_doc():
            # One-line purpose: build a fresh PermissionDoc from the arguments.
            return PermissionDoc(
                permission_key=permission_key,
                permission_name=permission_name,
                description=description,
                created_at=datetime.now(),
                updated_at=datetime.now()
            )

        def update_doc_fields(doc):
            # One-line purpose: copy the arguments onto an existing doc.
            doc.permission_key = permission_key
            doc.permission_name = permission_name
            doc.description = description
            doc.updated_at = datetime.now()

        try:
            # Check if permission with this key already exists
            existing_doc = await PermissionDoc.find_one(
                {str(PermissionDoc.permission_key): permission_key}
            )
        except Exception as e:
            # NOTE(review): a DB error is treated as "not found", which can
            # lead to creating a duplicate — confirm this is intentional.
            existing_doc = None

        if existing_doc:
            # If permission with this key already exists
            if custom_permission_id and str(custom_permission_id) != str(existing_doc.id):
                # Different ID provided - replace the document
                id_conflict = await PermissionDoc.get(custom_permission_id)
                if id_conflict:
                    raise RequestValidationError("Permission with the provided ID already exists.")

                new_doc = create_new_doc()
                new_doc.id = custom_permission_id
                await new_doc.create()
                await existing_doc.delete()
                return new_doc
            else:
                # Same ID or no ID provided - update existing document
                update_doc_fields(existing_doc)
                await existing_doc.save()
                return existing_doc
        else:
            # If no existing document with this key, create new document
            new_doc = create_new_doc()

            if custom_permission_id:
                id_conflict = await PermissionDoc.get(custom_permission_id)
                if id_conflict:
                    raise RequestValidationError("Permission with the provided ID already exists.")
                new_doc.id = custom_permission_id

            await new_doc.create()
            return new_doc

    async def query_permissions(
            self,
            permission_key: Optional[str] = None,
            permission_name: Optional[str] = None,
            skip: int = 0,
            limit: int = 10
    ) -> Tuple[List[PermissionDoc], int]:
        """Query permissions with pagination and fuzzy search"""
        query = {}
        if permission_key:
            query["permission_key"] = {"$regex": permission_key, "$options": "i"}
        if permission_name:
            query["permission_name"] = {"$regex": permission_name, "$options": "i"}
        cursor = PermissionDoc.find(query)
        total = await cursor.count()
        docs = await cursor.skip(skip).limit(limit).to_list()
        return docs, total

    async def query_permissions_no_pagination(
            self,
            permission_id: Optional[str] = None,
            permission_key: Optional[str] = None,
            permission_name: Optional[str] = None
    ) -> Tuple[List[PermissionDoc], int]:
        """Query permissions fuzzy search"""
        query = {}
        if permission_id:
            try:
                query["_id"] = ObjectId(permission_id)  # Convert string to ObjectId for MongoDB
            except Exception:
                raise RequestValidationError("Invalid permission_id format. Must be a valid ObjectId.")
        if permission_key:
            query["permission_key"] = {"$regex": permission_key, "$options": "i"}
        if permission_name:
            query["permission_name"] = {"$regex": permission_name, "$options": "i"}
        cursor = PermissionDoc.find(query)
        total = await cursor.count()
        docs = await cursor.to_list()
        return docs, total

    async def delete_permission(self, permission_id: str) -> None:
        """Delete a permission document after checking if it is referenced by any role and is not default"""
        if not permission_id:
            raise RequestValidationError("permission_id is required.")
        # Check if any role references this permission
        role = await RoleDoc.find_one({"permission_ids": str(permission_id)})
        if role:
            raise RequestValidationError("Permission is referenced by a role and cannot be deleted.")
        doc = await PermissionDoc.get(permission_id)
        if not doc:
            raise RequestValidationError("Permission not found.")
        # Check if the permission is default
        if doc.is_default:
            raise RequestValidationError("Default permission cannot be deleted.")
        await doc.delete()
||||
195
backend/infra/permission/role_handler.py
Normal file
195
backend/infra/permission/role_handler.py
Normal file
@ -0,0 +1,195 @@
|
||||
from typing import Optional, List, Tuple
|
||||
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
|
||||
from backend.models.permission.models import RoleDoc, PermissionDoc, UserRoleDoc
|
||||
from bson import ObjectId
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class RoleHandler:
    """CRUD handler for RoleDoc, including permission assignment."""

    def __init__(self):
        pass

    async def create_role(self, role_key: str, role_name: str, role_description: Optional[str], role_level: int) -> Optional[RoleDoc]:
        """Create a new role, ensuring role_key and role_name are unique and not empty"""
        if not role_key or not role_name:
            raise RequestValidationError("role_key and role_name are required.")
        if await RoleDoc.find_one({str(RoleDoc.role_key): role_key}) or await RoleDoc.find_one(
                {str(RoleDoc.role_name): role_name}):
            raise RequestValidationError("role_key or role_name has already been created.")
        doc = RoleDoc(
            role_key=role_key,
            role_name=role_name,
            role_description=role_description,
            permission_ids=[],
            role_level=role_level,
            created_at=datetime.now(),
            updated_at=datetime.now()
        )
        await doc.create()
        return doc

    async def update_role(self, role_id: str, role_key: str, role_name: str,
                          role_description: Optional[str], role_level: int) -> Optional[
        RoleDoc]:
        """Update an existing role, ensuring role_key and role_name are unique and not empty"""
        if not role_id or not role_key or not role_name:
            raise RequestValidationError("role_id, role_key and role_name are required.")
        doc = await RoleDoc.get(role_id)
        if not doc:
            raise RequestValidationError("role not found.")
        if doc.is_default:
            raise RequestValidationError("Default role cannot be updated.")
        # Check for uniqueness (exclude self). _id is stored as an ObjectId,
        # so the raw string id would never match and the role being updated
        # would count as its own conflict, rejecting every no-op rename —
        # convert to ObjectId before comparing.
        conflict = await RoleDoc.find_one({
            "$and": [
                {"_id": {"$ne": ObjectId(role_id)}},
                {"$or": [
                    {str(RoleDoc.role_key): role_key},
                    {str(RoleDoc.role_name): role_name}
                ]}
            ]
        })
        if conflict:
            raise RequestValidationError("role_key or role_name already exists.")
        doc.role_key = role_key
        doc.role_name = role_name
        doc.role_description = role_description
        doc.role_level = role_level
        doc.updated_at = datetime.now()
        await doc.save()
        return doc

    async def create_or_update_role(self, role_key: str, role_name: str, role_level: int, custom_role_id: Optional[str], role_description: Optional[str] = None) -> Optional[RoleDoc]:
        """Create or update a role document"""
        # Input validation
        if not role_key or not role_name:
            raise RequestValidationError("role_key and role_name are required.")

        def create_new_doc():
            # One-line purpose: build a fresh RoleDoc from the arguments.
            return RoleDoc(
                role_key=role_key,
                role_name=role_name,
                role_description=role_description,
                role_level=role_level,
                permission_ids=[],
                created_at=datetime.now(),
                updated_at=datetime.now()
            )

        def update_doc_fields(doc):
            # One-line purpose: copy the arguments onto an existing doc.
            doc.role_key = role_key
            doc.role_name = role_name
            doc.role_description = role_description
            doc.role_level = role_level
            doc.updated_at = datetime.now()

        # Check if role with this key already exists
        existing_doc = await RoleDoc.find_one(
            {str(RoleDoc.role_key): role_key}
        )

        if existing_doc:
            # If role with this key already exists
            if custom_role_id and str(custom_role_id) != str(existing_doc.id):
                # Different ID provided - replace the document
                id_conflict = await RoleDoc.get(custom_role_id)
                if id_conflict:
                    raise RequestValidationError("Role with the provided ID already exists.")

                new_doc = create_new_doc()
                new_doc.id = custom_role_id
                await new_doc.create()
                await existing_doc.delete()
                return new_doc

            else:
                # Same ID or no ID provided - update existing document
                update_doc_fields(existing_doc)
                await existing_doc.save()
                return existing_doc
        else:
            # If no existing document with this key, create new document
            new_doc = create_new_doc()

            if custom_role_id:
                id_conflict = await RoleDoc.get(custom_role_id)
                if id_conflict:
                    raise RequestValidationError("Role with the provided ID already exists.")
                new_doc.id = custom_role_id

            # Use create() for consistency with every other persistence call
            # in this module (Beanie's create() wraps insert()).
            await new_doc.create()
            return new_doc

    async def query_roles(self, role_key: Optional[str], role_name: Optional[str], skip: int = 0, limit: int = 10) -> \
            Tuple[List[RoleDoc], int]:
        """Query roles with pagination and fuzzy search by role_key and role_name"""
        query = {}
        if role_key:
            query["role_key"] = {"$regex": role_key, "$options": "i"}
        if role_name:
            query["role_name"] = {"$regex": role_name, "$options": "i"}
        cursor = RoleDoc.find(query)
        total = await cursor.count()
        docs = await cursor.skip(skip).limit(limit).to_list()
        return docs, total

    async def query_roles_no_pagination(
            self,
            role_id: Optional[str] = None,
            role_key: Optional[str] = None,
            role_name: Optional[str] = None
    ) -> Tuple[List[RoleDoc], int]:
        """Query roles fuzzy search without pagination"""
        query = {}
        if role_id:
            try:
                query["_id"] = ObjectId(role_id)  # Convert string to ObjectId for MongoDB
            except Exception:
                raise RequestValidationError("Invalid role_id format. Must be a valid ObjectId.")
        if role_key:
            query["role_key"] = {"$regex": role_key, "$options": "i"}
        if role_name:
            query["role_name"] = {"$regex": role_name, "$options": "i"}
        cursor = RoleDoc.find(query)
        total = await cursor.count()
        docs = await cursor.to_list()
        return docs, total

    async def assign_permissions_to_role(self, role_id: str, permission_ids: List[str]) -> Optional[RoleDoc]:
        """Assign permissions to a role by updating the permission_ids field"""
        if not role_id or not permission_ids:
            raise RequestValidationError("role_id and permission_ids are required.")
        doc = await RoleDoc.get(role_id)
        if not doc:
            raise RequestValidationError("Role not found.")

        # Validate that all permission_ids exist in the permission collection
        for permission_id in permission_ids:
            permission_doc = await PermissionDoc.get(permission_id)
            if not permission_doc:
                raise RequestValidationError(f"Permission with id {permission_id} not found.")

        # Remove duplicates from permission_ids while preserving order
        unique_permission_ids = list(dict.fromkeys(permission_ids))

        doc.permission_ids = unique_permission_ids
        doc.updated_at = datetime.now()
        await doc.save()
        return doc

    async def delete_role(self, role_id: str) -> None:
        """Delete a role document after checking if it is referenced by any user and is not default"""
        if not role_id:
            raise RequestValidationError("role_id is required.")
        # Check if any user references this role
        user_role = await UserRoleDoc.find_one({"role_ids": str(role_id)})
        if user_role:
            raise RequestValidationError("Role is referenced by a user and cannot be deleted.")
        doc = await RoleDoc.get(role_id)
        if not doc:
            raise RequestValidationError("Role not found.")
        # Check if the role is default
        if doc.is_default:
            raise RequestValidationError("Default role cannot be deleted.")
        await doc.delete()
||||
65
backend/infra/permission/user_role_handler.py
Normal file
65
backend/infra/permission/user_role_handler.py
Normal file
@ -0,0 +1,65 @@
|
||||
from typing import Optional, List
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
from backend.models.permission.models import RoleDoc, UserRoleDoc, PermissionDoc
|
||||
from bson import ObjectId
|
||||
|
||||
|
||||
class UserRoleHandler:
    """Helpers for user -> role assignments and effective permission lookup."""

    def __init__(self):
        pass

    async def assign_roles_to_user(self, user_id: str, role_ids: List[str]) -> Optional[UserRoleDoc]:
        """Assign roles to a user by updating or creating the UserRoleDoc.

        Raises:
            RequestValidationError: if arguments are missing or any role id
                does not exist.
        """
        if not user_id or not role_ids:
            raise RequestValidationError("user_id and role_ids are required.")

        # Validate that all role_ids exist in the role collection
        for role_id in role_ids:
            role_doc = await RoleDoc.get(role_id)
            if not role_doc:
                raise RequestValidationError(f"Role with id {role_id} not found.")

        # Remove duplicates while keeping first-seen order
        unique_role_ids = list(dict.fromkeys(role_ids))

        existing_user_role = await UserRoleDoc.find_one(UserRoleDoc.user_id == user_id)
        if existing_user_role:
            # Update existing UserRoleDoc
            existing_user_role.role_ids = unique_role_ids
            await existing_user_role.save()
            return existing_user_role

        # Create new UserRoleDoc
        user_role_doc = UserRoleDoc(
            user_id=user_id,
            role_ids=unique_role_ids
        )
        await user_role_doc.insert()
        return user_role_doc

    async def get_role_and_permission_by_user_id(self, user_id: str) -> tuple[list[str], list[str]]:
        """Get all role names and permission keys for a user by user_id."""
        user_role_doc = await UserRoleDoc.find_one(UserRoleDoc.user_id == user_id)
        if not user_role_doc or not user_role_doc.role_ids:
            # No roles assigned
            return [], []

        # role_ids/permission_ids are stored as strings, but MongoDB's _id is
        # an ObjectId — convert before the $in query, otherwise nothing matches.
        role_object_ids = [ObjectId(rid) for rid in user_role_doc.role_ids]
        roles = await RoleDoc.find({"_id": {"$in": role_object_ids}}).to_list()
        role_names = [role.role_name for role in roles]

        # Collect all permission ids from all roles, de-duplicated in order.
        all_permission_ids = []
        for role in roles:
            if role.permission_ids:
                all_permission_ids.extend(role.permission_ids)
        unique_permission_ids = list(dict.fromkeys(all_permission_ids))

        if unique_permission_ids:
            permission_object_ids = [ObjectId(pid) for pid in unique_permission_ids]
            permissions = await PermissionDoc.find({"_id": {"$in": permission_object_ids}}).to_list()
            permission_keys = [perm.permission_key for perm in permissions]
        else:
            permission_keys = []
        return role_names, permission_keys
|
||||
121
backend/infra/user_profile/user_profile_handler.py
Normal file
121
backend/infra/user_profile/user_profile_handler.py
Normal file
@ -0,0 +1,121 @@
|
||||
from common.constants.region import UserRegion
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from backend.models.user.models import UserAccountDoc
|
||||
from backend.models.user.constants import (
|
||||
UserAccountProperty,
|
||||
)
|
||||
from backend.models.permission.constants import (
|
||||
AdministrativeRole,
|
||||
Capability,
|
||||
)
|
||||
from typing import Optional
|
||||
from backend.models.user_profile.models import (
|
||||
SelfIntro,
|
||||
Tags,
|
||||
Photo,
|
||||
Email,
|
||||
Mobile,
|
||||
FLID,
|
||||
Password,
|
||||
BasicProfileDoc,
|
||||
ProviderProfileDoc,
|
||||
ExpectedSalary,
|
||||
)
|
||||
|
||||
from backend.models.user.constants import UserRegionToCurrency
|
||||
|
||||
|
||||
class UserProfileHandler:
    """Creates and fetches user account and profile documents."""

    async def create_new_user_account(
        self,
        property: UserAccountProperty,
        capability: Capability,
        user_role: AdministrativeRole,
        region: UserRegion,
    ) -> UserAccountDoc:
        """Create and persist a fresh UserAccountDoc.

        Note: UserAccountDoc stores the region in ``preferred_region``.
        Passing it as ``region=`` (the previous keyword) was silently
        dropped by the model, leaving every account on the default region.
        """
        user_account = UserAccountDoc(
            profile_id=None,
            account_id=None,
            service_plan_id=None,
            properties=int(property),
            capabilities=int(capability),
            user_role=int(user_role),
            preferred_region=region,  # model field is named preferred_region
        )
        return await user_account.create()

    async def create_basic_profile(
        self,
        user_id: str,
        email_address: str,
        email_verified: bool,
        mobile_number: str,
        mobile_verified: bool,
        password_setup: bool,
        region: UserRegion,
        time_zone: Optional[str] = "UTC",
    ) -> BasicProfileDoc:
        """Return the user's BasicProfileDoc, creating a blank one if missing.

        Idempotent: an existing profile is returned untouched.
        """
        basic_profile = await BasicProfileDoc.find_one(
            BasicProfileDoc.user_id == user_id
        )
        if basic_profile:
            return basic_profile

        tags = Tags(skill=[])
        self_intro = SelfIntro(summary="", content_html="", tags=tags)
        photo = Photo(url="", base64="", filename="")
        email = Email(address=email_address, verified=email_verified)
        mobile = Mobile(number=mobile_number, verified=mobile_verified)
        current_time = datetime.now(timezone.utc)
        flid = FLID(
            identity=user_id,
            set_by=user_id,
            create_time=current_time,
            update_time=current_time,
        )
        # Passwords are treated as valid for one year from creation.
        password = Password(
            set_up=password_setup,
            update_time=current_time,
            expiry=(current_time + timedelta(days=365)),
        )
        basic_profile = BasicProfileDoc(
            user_id=user_id,
            self_intro=self_intro,
            photo=photo,
            email=email,
            mobile=mobile,
            FLID=flid,
            password=password,
            region=region,
            time_zone=time_zone,
        )
        return await basic_profile.create()

    async def create_provider_profile(self, user_id: str) -> ProviderProfileDoc:
        """Return the user's ProviderProfileDoc, creating a default one if missing."""
        provider_profile = await ProviderProfileDoc.find_one(
            {"user_id": user_id}
        )
        if provider_profile:
            return provider_profile

        # Default currency follows the user's region (OTHER falls back to USD).
        region = await self.__get_user_region(user_id)
        expected_salary = ExpectedSalary(
            currency=UserRegionToCurrency[region], hourly=0.0
        )
        provider_profile = ProviderProfileDoc(
            user_id=user_id,
            expected_salary=expected_salary,
            accepting_request=False,
        )
        return await provider_profile.create()

    async def get_account_by_id(self, user_id: str) -> UserAccountDoc:
        """Fetch a UserAccountDoc by document id (None if malformed or missing)."""
        return await UserAccountDoc.get(user_id)

    async def __get_user_region(self, user_id: str) -> UserRegion:
        """Region from the user's basic profile, defaulting to UserRegion.OTHER."""
        user_profile = await BasicProfileDoc.find_one(
            BasicProfileDoc.user_id == user_id
        )
        return user_profile.region if user_profile else UserRegion.OTHER
|
||||
8
backend/models/__init__.py
Normal file
8
backend/models/__init__.py
Normal file
@ -0,0 +1,8 @@
|
||||
from .user import user_models
from .user_profile import profile_models
from .permission import permission_models

# Aggregate the document models from every sub-package into a single list
# (used for database/model registration).
backend_models = [*user_models, *profile_models, *permission_models]
|
||||
415
backend/models/base_doc.py
Normal file
415
backend/models/base_doc.py
Normal file
@ -0,0 +1,415 @@
|
||||
"""
|
||||
BaseDoc - A custom document class that provides Beanie-like interface using direct MongoDB operations
|
||||
"""
|
||||
import asyncio
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, List, Dict, Any, Type, Union
|
||||
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorDatabase
|
||||
from pydantic import BaseModel
|
||||
from pydantic._internal._model_construction import ModelMetaclass
|
||||
from common.config.app_settings import app_settings
|
||||
|
||||
|
||||
class QueryExpression:
    """Builds MongoDB filter fragments from Python comparison operators.

    Comparing a QueryExpression with a value does not test equality; it
    returns a Mongo query dict, e.g. ``QueryExpression("age") > 3`` yields
    ``{"age": {"$gt": 3}}`` and ``== value`` yields ``{"age": value}``.
    """

    def __init__(self, field_name: str):
        self.field_name = field_name

    def _fragment(self, operator: Optional[str], value: Any) -> Dict[str, Any]:
        # Plain equality uses Mongo's implicit-match form {field: value};
        # every other comparison nests under its operator keyword.
        if operator is None:
            return {self.field_name: value}
        return {self.field_name: {operator: value}}

    def __eq__(self, other: Any) -> Dict[str, Any]:
        """field == value"""
        return self._fragment(None, other)

    def __ne__(self, other: Any) -> Dict[str, Any]:
        """field != value"""
        return self._fragment("$ne", other)

    def __gt__(self, other: Any) -> Dict[str, Any]:
        """field > value"""
        return self._fragment("$gt", other)

    def __lt__(self, other: Any) -> Dict[str, Any]:
        """field < value"""
        return self._fragment("$lt", other)

    def __ge__(self, other: Any) -> Dict[str, Any]:
        """field >= value"""
        return self._fragment("$gte", other)

    def __le__(self, other: Any) -> Dict[str, Any]:
        """field <= value"""
        return self._fragment("$lte", other)
|
||||
|
||||
|
||||
class FieldDescriptor:
|
||||
"""Descriptor for field access like Beanie's field == value pattern"""
|
||||
def __init__(self, field_name: str, field_type: type):
|
||||
self.field_name = field_name
|
||||
self.field_type = field_type
|
||||
|
||||
def __get__(self, instance: Any, owner: type) -> Any:
|
||||
"""
|
||||
- Class access (instance is None): return QueryExpression for building queries
|
||||
- Instance access (instance is not None): return the actual field value
|
||||
"""
|
||||
if instance is None:
|
||||
return QueryExpression(self.field_name)
|
||||
return instance.__dict__.get(self.field_name)
|
||||
|
||||
def __set__(self, instance: Any, value: Any) -> None:
|
||||
"""Set instance field value with type validation (compatible with Pydantic validation)"""
|
||||
if not isinstance(value, self.field_type):
|
||||
raise TypeError(f"Field {self.field_name} must be {self.field_type}")
|
||||
instance.__dict__[self.field_name] = value
|
||||
|
||||
|
||||
class FieldCondition:
    """Legacy container for a single MongoDB field comparison.

    Holds the (field, operator, value) triple and also exposes
    ``left``/``right`` aliases so older condition-parsing code that expects
    a binary-expression shape keeps working.
    """

    def __init__(self, field_name: str, value: Any, operator: str = "$eq"):
        self.field_name = field_name
        self.operator = operator
        self.value = value
        # Binary-expression aliases kept for compatibility with the
        # existing condition-parsing code.
        self.left = self
        self.right = value
|
||||
|
||||
|
||||
# Module-level variables for the global database connection; lazily
# initialized by BaseDoc._get_database on first use.
_db: Optional[AsyncIOMotorDatabase] = None
_client: Optional[AsyncIOMotorClient] = None

# Context variable for the tenant database: when set (via
# BaseDoc.set_tenant_database), the per-context tenant database takes
# precedence over the global connection above.
import contextvars
_tenant_db_context: contextvars.ContextVar[Optional[AsyncIOMotorDatabase]] = contextvars.ContextVar('tenant_db', default=None)
|
||||
|
||||
|
||||
class QueryModelMeta(ModelMetaclass):
    """Metaclass: automatically create FieldDescriptor for model fields.

    Runs Pydantic's ModelMetaclass first, then installs a FieldDescriptor
    per directly-annotated field so that ``Model.field`` yields a
    QueryExpression at class level while instance access returns the value.
    """
    def __new__(cls, name: str, bases: tuple, namespace: dict):
        # Get model field annotations (like name: str -> "name" and str)
        annotations = namespace.get("__annotations__", {})

        # Create the class first using Pydantic's metaclass
        new_class = super().__new__(cls, name, bases, namespace)

        # After Pydantic processes the fields, add the descriptors as class attributes
        for field_name, field_type in annotations.items():
            if field_name != 'id':  # Skip the id field as it's handled specially
                # Add the descriptor as a class attribute
                setattr(new_class, field_name, FieldDescriptor(field_name, field_type))

        return new_class

    def __getattr__(cls, name: str):
        """Handle field access like Doc.field_name for query building.

        Fallback for fields inherited from base classes — the descriptors
        installed in __new__ only cover fields annotated on the class itself.
        """
        # Check if this is a field that exists in the model
        if hasattr(cls, 'model_fields') and name in cls.model_fields:
            return QueryExpression(name)
        raise AttributeError(f"'{cls.__name__}' object has no attribute '{name}'")
|
||||
|
||||
class BaseDoc(BaseModel, metaclass=QueryModelMeta):
    """
    Base document class that provides a Beanie-like interface using direct
    MongoDB operations. All model classes should inherit from this instead
    of Beanie's Document.
    """

    id: Optional[str] = None  # MongoDB _id, held as its string form

    def model_dump(self, **kwargs):
        """Override model_dump to drop any FieldDescriptor objects that leak in."""
        result = super().model_dump(**kwargs)
        return {key: value for key, value in result.items()
                if not isinstance(value, FieldDescriptor)}

    @classmethod
    def field(cls, field_name: str) -> QueryExpression:
        """Get a field expression for query building."""
        return QueryExpression(field_name)

    @classmethod
    async def _get_database(cls) -> AsyncIOMotorDatabase:
        """Get the database connection.

        A tenant database set via set_tenant_database() takes precedence;
        otherwise a lazily created global connection is used.
        """
        tenant_db = _tenant_db_context.get()
        if tenant_db is not None:
            return tenant_db

        global _db, _client
        if _db is None:
            _client = AsyncIOMotorClient(app_settings.MONGODB_URI)
            _db = _client[app_settings.MONGODB_NAME]
        return _db

    @classmethod
    def set_tenant_database(cls, db: AsyncIOMotorDatabase):
        """Set the tenant database for the current execution context."""
        _tenant_db_context.set(db)

    @classmethod
    def _get_collection_name(cls) -> str:
        """Collection name from Settings.name, else snake_case of the class name."""
        if hasattr(cls, 'Settings') and hasattr(cls.Settings, 'name'):
            return cls.Settings.name
        import re
        name = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', cls.__name__)
        return re.sub('([a-z0-9])([A-Z])', r'\1_\2', name).lower()

    @classmethod
    def _from_mongo(cls, doc: Dict[str, Any]) -> 'BaseDoc':
        """Build a model instance from a raw MongoDB document.

        Drops keys not declared on the model and maps Mongo's ``_id`` to the
        string ``id`` field. (Previously duplicated in find_one/get/to_list.)
        """
        mongo_id = doc.pop('_id', None)
        model_fields = set(cls.model_fields.keys())
        filtered_doc = {k: v for k, v in doc.items() if k in model_fields}
        if mongo_id:
            filtered_doc['id'] = str(mongo_id)
        return cls(**filtered_doc)

    @classmethod
    def find(cls, *conditions) -> 'QueryBuilder':
        """Find documents matching conditions - returns QueryBuilder for chaining."""
        return QueryBuilder(cls, conditions)

    @classmethod
    async def find_one(cls, *conditions) -> Optional['BaseDoc']:
        """Find one document matching conditions, or None."""
        db = await cls._get_database()
        collection = db[cls._get_collection_name()]

        # Convert Beanie-style conditions to a MongoDB query
        query = cls._convert_conditions_to_query(conditions)

        doc = await collection.find_one(query)
        if doc:
            return cls._from_mongo(doc)
        return None

    @classmethod
    async def get(cls, doc_id: str) -> Optional['BaseDoc']:
        """Get a document by its string ObjectId; returns None for malformed ids."""
        from bson import ObjectId
        try:
            object_id = ObjectId(doc_id)
        except Exception:  # invalid ObjectId string (was a bare except)
            return None

        db = await cls._get_database()
        collection = db[cls._get_collection_name()]

        doc = await collection.find_one({"_id": object_id})
        if doc:
            return cls._from_mongo(doc)
        return None

    @classmethod
    def _convert_conditions_to_query(cls, conditions) -> Dict[str, Any]:
        """Convert Beanie-style conditions to a MongoDB query dict."""
        if not conditions:
            return {}

        query = {}
        for condition in conditions:
            if isinstance(condition, dict):
                # QueryExpression results (dicts) and direct dictionary queries
                query.update(condition)
            elif isinstance(condition, FieldCondition):
                # Legacy FieldCondition objects
                if condition.operator == "$eq":
                    query[condition.field_name] = condition.value
                else:
                    query[condition.field_name] = {condition.operator: condition.value}
            elif hasattr(condition, 'left') and hasattr(condition, 'right'):
                # field == value style binary expressions
                query[condition.left.name] = condition.right
            elif hasattr(condition, '__dict__'):
                # Complex conditions like FLID.identity == value
                if hasattr(condition, 'left') and hasattr(condition, 'right'):
                    left = condition.left
                    if hasattr(left, 'name') and hasattr(left, 'left'):
                        # Nested field access like FLID.identity
                        query[f"{left.left.name}.{left.name}"] = condition.right
                    else:
                        query[left.name] = condition.right

        return query

    def _convert_decimals_to_float(self, obj):
        """Convert Decimal objects to float (recursively) for MongoDB compatibility."""
        from decimal import Decimal

        if isinstance(obj, Decimal):
            return float(obj)
        elif isinstance(obj, dict):
            return {key: self._convert_decimals_to_float(value) for key, value in obj.items()}
        elif isinstance(obj, list):
            return [self._convert_decimals_to_float(item) for item in obj]
        else:
            return obj

    def _identity_query(self) -> Dict[str, Any]:
        """Best-effort filter identifying this document by its first unique-ish field.

        NOTE(review): matching on user_id/email/mobile/auth_code rather than
        _id means save()/delete() can touch a different document that shares
        the field value — confirm this is intended for every model.
        """
        for field_name in ('user_id', 'email', 'mobile', 'auth_code'):
            if hasattr(self, field_name):
                return {field_name: getattr(self, field_name)}
        return {}

    def _mongo_payload(self) -> Dict[str, Any]:
        """Serialized document body (id excluded, Decimals converted)."""
        return self._convert_decimals_to_float(self.model_dump(exclude={'id'}))

    async def create(self) -> 'BaseDoc':
        """Insert this document and set ``self.id`` from the generated _id."""
        db = await self._get_database()
        collection = db[self._get_collection_name()]

        result = await collection.insert_one(self._mongo_payload())
        if result.inserted_id:
            self.id = str(result.inserted_id)
        return self

    async def insert(self) -> 'BaseDoc':
        """Alias of create().

        Several handlers call ``doc.insert()`` (Beanie's method name); the
        alias keeps them working against this direct-MongoDB base class.
        """
        return await self.create()

    async def save(self) -> 'BaseDoc':
        """Save this document (upsert keyed on the identity query, else insert)."""
        db = await self._get_database()
        collection = db[self._get_collection_name()]
        doc_dict = self._mongo_payload()

        query = self._identity_query()
        if query:
            # Update (or create) the document matching the identity query
            result = await collection.update_one(query, {"$set": doc_dict}, upsert=True)
            if result.upserted_id:
                self.id = str(result.upserted_id)
        else:
            # No identity field available — plain insert
            result = await collection.insert_one(doc_dict)
            if result.inserted_id:
                self.id = str(result.inserted_id)

        return self

    async def delete(self) -> bool:
        """Delete the document matched by the identity query; returns success."""
        db = await self._get_database()
        collection = db[self._get_collection_name()]

        query = self._identity_query()
        if query:
            result = await collection.delete_one(query)
            return result.deleted_count > 0
        return False
|
||||
|
||||
|
||||
class QueryBuilder:
    """Chainable query object mirroring Beanie's QueryBuilder API."""

    def __init__(self, model_class: Type[BaseDoc], conditions: tuple):
        self.model_class = model_class
        self.conditions = conditions
        self._limit_value: Optional[int] = None
        self._skip_value: Optional[int] = None

    def limit(self, n: int) -> 'QueryBuilder':
        """Cap the number of returned documents."""
        self._limit_value = n
        return self

    def skip(self, n: int) -> 'QueryBuilder':
        """Skip the first ``n`` matching documents."""
        self._skip_value = n
        return self

    async def to_list(self) -> List[BaseDoc]:
        """Execute the query and materialize the matches as model instances."""
        model = self.model_class
        db = await model._get_database()
        collection = db[model._get_collection_name()]

        cursor = collection.find(model._convert_conditions_to_query(self.conditions))
        if self._skip_value:
            cursor = cursor.skip(self._skip_value)
        if self._limit_value:
            cursor = cursor.limit(self._limit_value)

        known_fields = set(model.model_fields.keys())
        instances = []
        for raw in await cursor.to_list(length=None):
            # Map Mongo's _id to the string id field and drop unknown keys.
            object_id = raw.pop('_id', None)
            payload = {key: val for key, val in raw.items() if key in known_fields}
            if object_id:
                payload['id'] = str(object_id)
            instances.append(model(**payload))
        return instances

    async def first_or_none(self) -> Optional[BaseDoc]:
        """Return the first match, or None when nothing matches."""
        matches = await self.limit(1).to_list()
        return matches[0] if matches else None

    async def count(self) -> int:
        """Count documents matching the conditions (skip/limit are ignored)."""
        model = self.model_class
        db = await model._get_database()
        collection = db[model._get_collection_name()]
        return await collection.count_documents(
            model._convert_conditions_to_query(self.conditions)
        )
|
||||
3
backend/models/permission/__init__.py
Normal file
3
backend/models/permission/__init__.py
Normal file
@ -0,0 +1,3 @@
|
||||
from .models import PermissionDoc, RoleDoc, UserRoleDoc

# Permission-related documents exported for database/model registration.
permission_models = [PermissionDoc, RoleDoc, UserRoleDoc]
|
||||
26
backend/models/permission/constants.py
Normal file
26
backend/models/permission/constants.py
Normal file
@ -0,0 +1,26 @@
|
||||
from enum import IntEnum
|
||||
|
||||
|
||||
class AdministrativeRole(IntEnum):
    # Power-of-two values (bit-flag style), stored as a single int on the account.
    NONE = 0
    PERSONAL = 1
    BUSINESS = 2
    CONTRIBUTOR = 4
    ADMINISTRATOR = 8
    # NOTE(review): the UI currently cannot sign in when the user role is 8
    # (ADMINISTRATOR) — confirm whether that is intentional.
|
||||
|
||||
|
||||
class Capability(IntEnum):
    # Bit flags describing what a user account can do; combined as an int bitmask.
    VISITOR = 1
    COMMUNICATOR = 2
    REQUESTER = 4
    PROVIDER = 8
    DEVELOPER = 16
|
||||
|
||||
|
||||
class Feature(IntEnum):
    # Bitmask of product features; ANY sets every feature bit.
    ANY = 0xFFFFFFFF
    SENDMESSAGE = 0x1
    INITIATEREQUEST = 0x2
    MAKEPROPOSAL = 0x4
    CREATEPROJECT = 0x8
|
||||
53
backend/models/permission/models.py
Normal file
53
backend/models/permission/models.py
Normal file
@ -0,0 +1,53 @@
|
||||
from datetime import datetime
from typing import List, Optional

from pydantic import Field

from ..base_doc import BaseDoc
|
||||
|
||||
|
||||
class PermissionDoc(BaseDoc):
    """A single named permission that can be attached to roles."""

    permission_name: str
    permission_key: str
    description: Optional[str] = None  # Description of the permission, optional
    # default_factory runs per document; a plain ``datetime.now()`` default is
    # evaluated once at import time, so every document would share the same
    # frozen timestamp.
    created_at: datetime = Field(default_factory=datetime.now)  # Creation timestamp, auto-generated
    updated_at: datetime = Field(default_factory=datetime.now)  # Last update timestamp
    is_default: bool = False  # marks built-in records (cf. RoleDoc.is_default)

    class Settings:
        # Default collections created by Freeleaps for tenant databases use '_' prefix
        # to prevent naming conflicts with tenant-created collections
        name = "_permission"
        indexes = [
            "permission_key"
        ]
|
||||
|
||||
|
||||
class RoleDoc(BaseDoc):
    """A role: a named, leveled bundle of permission ids."""

    role_key: str
    role_name: str
    role_description: Optional[str] = None
    permission_ids: list[str]  # ids of PermissionDoc documents, stored as strings
    role_level: int
    revision_id: Optional[str] = None  # Revision ID for version control
    # default_factory runs per document; a plain ``datetime.now()`` default is
    # evaluated once at import time and shared by every document.
    created_at: datetime = Field(default_factory=datetime.now)  # Creation timestamp, auto-generated
    updated_at: datetime = Field(default_factory=datetime.now)  # Last update timestamp
    is_default: bool = False  # default roles cannot be deleted (see delete_role)

    class Settings:
        # Default collections created by Freeleaps for tenant databases use '_' prefix
        # to prevent naming conflicts with tenant-created collections
        name = "_role"
        indexes = [
            "role_level"
        ]
|
||||
|
||||
class UserRoleDoc(BaseDoc):
    """Maps one user to the list of role document ids assigned to them."""
    user_id: str
    # RoleDoc ids stored as strings (see UserRoleHandler for conversion to ObjectId).
    role_ids: Optional[List[str]]

    class Settings:
        # Default collections created by Freeleaps for tenant databases use '_' prefix
        # to prevent naming conflicts with tenant-created collections
        name = "_user_role"
        indexes = [
            "user_id"
        ]
|
||||
17
backend/models/user/__init__.py
Normal file
17
backend/models/user/__init__.py
Normal file
@ -0,0 +1,17 @@
|
||||
from .models import (
    UserAccountDoc,
    UserPasswordDoc,
    UserEmailDoc,
    UserMobileDoc,
    AuthCodeDoc,
    UsageLogDoc,
)

# User-related documents exported for database/model registration.
user_models = [
    UserAccountDoc,
    UserPasswordDoc,
    UserEmailDoc,
    UserMobileDoc,
    AuthCodeDoc,
    UsageLogDoc,
]
|
||||
79
backend/models/user/constants.py
Normal file
79
backend/models/user/constants.py
Normal file
@ -0,0 +1,79 @@
|
||||
from enum import IntEnum
|
||||
from common.constants.region import UserRegion
|
||||
|
||||
|
||||
class NewUserMethod(IntEnum):
    # Channel used to register a new user.
    EMAIL = 1
    MOBILE = 2
|
||||
|
||||
|
||||
class UserAccountProperty(IntEnum):
    # Bit flags describing an account's setup/verification state.
    EMAIL_VERIFIED = 1
    MOBILE_VERIFIED = 2
    PAYMENT_SETUP = 4
    ACCEPT_REQUEST = 8
    READY_PROVIDER = 16
    MANAGE_PROJECT = 32
|
||||
|
||||
|
||||
class UserLoginAction(IntEnum):
    # Next step the client should take during the sign-in flow.
    VERIFY_EMAIL_WITH_AUTH_CODE = 0
    EXISTING_USER_PASSWORD_REQUIRED = 1
    NEW_USER_SET_PASSWORD = 2
    EMAIL_NOT_ASSOCIATED_WITH_USER = 3
    REVIEW_AND_REVISE_FLID = 4
    USER_SIGNED_IN = 100  # terminal state: authentication completed
|
||||
|
||||
|
||||
class Currency(IntEnum):
    # Supported billing currencies.
    UNKNOWN = 0
    USD = 1
    CNY = 2
|
||||
|
||||
|
||||
# Maps a user's region to the currency *name* (e.g. "CNY", "USD") used for
# pricing; regions other than ZH_CN are grouped under OTHER.
UserRegionToCurrency = {
    UserRegion.ZH_CN: Currency.CNY.name,
    UserRegion.OTHER: Currency.USD.name,
}
|
||||
|
||||
|
||||
# NOTE: duplicate definitions of NewUserMethod, UserAccountProperty and
# UserLoginAction removed — byte-identical copies are declared earlier in
# this module, and these re-definitions only rebound the same names to
# equivalent enums.
|
||||
|
||||
|
||||
class AuthType(IntEnum):
    # Channel an auth code / credential was issued for (see AuthCodeDoc).
    MOBILE = 0
    EMAIL = 1
    PASSWORD = 2
|
||||
|
||||
|
||||
class DepotStatus(IntEnum):
    # Lifecycle state of a depot.
    TO_BE_CREATED = 0
    CREATED = 1
    DELETED = 2
|
||||
|
||||
|
||||
class UserAccountStatus(IntEnum):
    # Lifecycle state of a user account.
    TO_BE_CREATED = 0
    CREATED = 1
    DELETED = 2
    DEACTIVATED = 3
|
||||
80
backend/models/user/models.py
Normal file
80
backend/models/user/models.py
Normal file
@ -0,0 +1,80 @@
|
||||
from datetime import datetime
from typing import List, Optional

from pydantic import Field

from backend.models.permission.constants import (
    AdministrativeRole,
    Capability,
)
from common.constants.region import UserRegion

from ..base_doc import BaseDoc
from .constants import AuthType, UserAccountProperty
|
||||
|
||||
|
||||
class UserAccountDoc(BaseDoc):
    """Core account record linking a user to profile, plan and permission data."""
    profile_id: Optional[str]
    account_id: Optional[str]
    service_plan_id: Optional[str]
    # Bitmask fields; handlers pass these as plain ints built from the enums
    # (see UserProfileHandler.create_new_user_account).
    properties: UserAccountProperty
    capabilities: Capability
    user_role: int = AdministrativeRole.NONE
    preferred_region: UserRegion = UserRegion.ZH_CN

    class Settings:
        name = "user_account"
|
||||
|
||||
|
||||
class UserPasswordDoc(BaseDoc):
    """Password credential for a user.

    NOTE(review): nothing in this model shows whether ``password`` holds a
    hash or plaintext — verify at the call sites before relying on either.
    """
    user_id: str
    password: str

    class Settings:
        name = "user_password"
|
||||
|
||||
|
||||
class UserEmailDoc(BaseDoc):
    """Email address associated with a user account."""
    user_id: str
    email: str

    class Settings:
        name = "user_email"
|
||||
|
||||
|
||||
class UserMobileDoc(BaseDoc):
    """Mobile number associated with a user account."""
    user_id: str
    mobile: str

    class Settings:
        name = "user_mobile"
|
||||
|
||||
|
||||
class AuthCodeDoc(BaseDoc):
    """One-time authentication code issued for sign-in/verification."""
    auth_code: str
    method: str  # presumably the address/number the code was sent to — TODO confirm
    method_type: AuthType  # delivery channel (MOBILE / EMAIL / PASSWORD)
    expiry: datetime  # code is invalid after this moment
    used: bool = False  # set once the code has been consumed

    class Settings:
        name = "user_auth_code"
|
||||
|
||||
from pydantic import Field


class UsageLogDoc(BaseDoc):
    """One usage/audit record for a tenant-scoped API operation."""

    # BUG FIX: the original used `timestamp: datetime = datetime.utcnow()`,
    # which evaluates ONCE at class-definition time, so every document shared
    # the import-time timestamp. default_factory evaluates per instance.
    timestamp: datetime = Field(default_factory=datetime.utcnow)  # timestamp
    tenant_id: str  # tenant id
    operation: str  # operation type
    request_id: str  # request id  # TODO: use true one
    status: str  # operation status
    latency_ms: int  # latency time (milliseconds)
    bytes_in: int  # input bytes
    bytes_out: int  # output bytes
    key_id: Optional[str] = None  # API Key ID
    # default_factory avoids a shared mutable default literal.
    extra: dict = Field(default_factory=dict)  # extra information

    class Settings:
        # MongoDB collection name and secondary indexes for common lookups.
        name = "usage_log_doc"
        indexes = [
            "tenant_id",
            "request_id",
            "key_id",
        ]
|
||||
3
backend/models/user_profile/__init__.py
Normal file
3
backend/models/user_profile/__init__.py
Normal file
@ -0,0 +1,3 @@
|
||||
from .models import BasicProfileDoc, ProviderProfileDoc

# Document models exported for ODM registration — presumably consumed by the
# database initialization code; TODO confirm where this list is used.
profile_models = [BasicProfileDoc, ProviderProfileDoc]
|
||||
103
backend/models/user_profile/models.py
Normal file
103
backend/models/user_profile/models.py
Normal file
@ -0,0 +1,103 @@
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
from pydantic import BaseModel, EmailStr
|
||||
import re
|
||||
|
||||
from decimal import Decimal
|
||||
from common.constants.region import UserRegion
|
||||
from ..base_doc import BaseDoc
|
||||
|
||||
|
||||
class Tags(BaseModel):
    """Tag buckets attached to a self-introduction."""

    skill: List[str]
|
||||
|
||||
|
||||
class SelfIntro(BaseModel):
    """User self-introduction: short summary, rich-text body, and tags."""

    summary: str = ""
    content_html: str = ""  # HTML body — rendered as-is; sanitize upstream
    tags: Tags
|
||||
|
||||
|
||||
class Photo(BaseModel):
    """Profile photo, stored inline as base64 with an optional hosted URL."""

    url: Optional[str]
    base64: str
    filename: str
|
||||
|
||||
|
||||
class Email(BaseModel):
    """Email address plus its verification state."""

    address: Optional[EmailStr]
    verified: bool = False
|
||||
|
||||
|
||||
class Mobile(BaseModel):
    """Mobile phone number plus its verification state."""

    number: Optional[str]
    verified: bool
|
||||
|
||||
|
||||
class FLID(BaseModel):
    """Freeleaps identity (FLID) with provenance and timestamps."""

    identity: str
    set_by: str  # who assigned the identity — presumably "system" or the user
    create_time: datetime
    update_time: datetime
|
||||
|
||||
|
||||
class Password(BaseModel):
    """Password metadata (not the secret itself): set/updated/expiry state."""

    set_up: bool  # whether the user has configured a password
    update_time: datetime
    expiry: datetime
|
||||
|
||||
|
||||
class BasicProfileDoc(BaseDoc):
    """Core per-user profile document: name, contact methods, FLID, password
    metadata, region and time zone."""

    user_id: str
    first_name: str = ""
    last_name: str = ""
    spoken_language: List[str] = []
    self_intro: SelfIntro
    photo: Photo
    email: Email
    mobile: Mobile
    FLID: FLID
    password: Password
    region: int = UserRegion.OTHER
    time_zone: Optional[str] = None  # IANA name — presumably; TODO confirm

    class Settings:
        name = "basic_profile"
        indexes = [
            "user_id",  # Add index for fast querying by user_id
            "email.address",  # This adds an index for the 'email.address' field
            # Compound text index for fuzzy search across multiple fields
            [("first_name", "text"), ("last_name", "text"), ("email.address", "text")],
        ]

    @classmethod
    async def fuzzy_search(cls, query: str) -> List["BasicProfileDoc"]:
        """Case-insensitive substring search over first name, last name and
        email address.

        BUG FIX: the original interpolated the raw user query into the regex
        pattern (f".*{query}.*"), so metacharacters like '(' or '+' could
        raise re.error or match unintended documents. The query is now
        escaped so it is always matched literally; since $regex matching is
        unanchored, the surrounding '.*' wrappers were redundant and are
        dropped.
        """
        regex = re.compile(re.escape(query), re.IGNORECASE)

        # Search first_name, last_name, and email fields using $or
        results = await cls.find(
            {
                "$or": [
                    {"first_name": {"$regex": regex}},
                    {"last_name": {"$regex": regex}},
                    {"email.address": {"$regex": regex}},
                ]
            }
        ).to_list()

        return results
|
||||
|
||||
|
||||
class ExpectedSalary(BaseModel):
    """Desired compensation for a provider."""

    currency: str = "USD"  # presumably an ISO 4217 code — TODO confirm
    # NOTE(review): float default for a Decimal field — pydantic coerces it,
    # but Decimal("0") would avoid the float round-trip.
    hourly: Decimal = 0.0
|
||||
|
||||
|
||||
class ProviderProfileDoc(BaseDoc):
    """Provider-specific profile: expected salary and availability flag."""

    user_id: str
    expected_salary: ExpectedSalary
    accepting_request: bool = False

    class Settings:
        # MongoDB collection name.
        name = "provider_profile"
|
||||
60
backend/services/auth/user_auth_service.py
Normal file
60
backend/services/auth/user_auth_service.py
Normal file
@ -0,0 +1,60 @@
|
||||
from backend.infra.auth.user_auth_handler import (
|
||||
UserAuthHandler,
|
||||
)
|
||||
from backend.models.user.constants import (
|
||||
AuthType,
|
||||
)
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class UserAuthService:
    """Service facade over UserAuthHandler.

    Every method is a thin async pass-through; the handler owns the actual
    persistence and verification logic.
    """

    def __init__(self):
        self.user_auth_handler = UserAuthHandler()

    async def get_user_id_by_email(self, email: str) -> Optional[str]:
        """Return the user id registered for *email*, or None if unknown."""
        return await self.user_auth_handler.get_user_id_by_email(email)

    async def verify_email_with_code(self, email: str, code: str) -> bool:
        """Check a one-time code previously sent to *email*."""
        return await self.user_auth_handler.verify_email_code(email, code)

    async def is_password_reset_required(self, user_id: str) -> bool:
        """Whether the user must reset their password before continuing."""
        return await self.user_auth_handler.is_password_reset_required(user_id)

    async def is_flid_reset_required(self, user_id: str) -> bool:
        """Whether the user must review/revise their FLID."""
        return await self.user_auth_handler.is_flid_reset_required(user_id)

    async def is_flid_available(self, user_flid: str) -> bool:
        """Whether *user_flid* is free to be claimed."""
        return await self.user_auth_handler.is_flid_available(user_flid)

    async def get_user_flid(self, user_id: str) -> str:
        """Return the FLID currently assigned to the user."""
        return await self.user_auth_handler.get_flid(user_id)

    async def update_flid(self, user_id: str, user_flid: str) -> str:
        """Assign a new FLID to the user."""
        return await self.user_auth_handler.update_flid(user_id, user_flid)

    async def generate_auth_code_for_object(
        self, deliver_object: str, auth_type: AuthType
    ) -> str:
        """Create an auth code for a delivery target (email/mobile)."""
        return await self.user_auth_handler.generate_auth_code(
            deliver_object, auth_type
        )

    async def verify_user_with_password(self, user_id: str, password: str) -> bool:
        """Check the supplied password against the user's stored credential."""
        return await self.user_auth_handler.verify_user_with_password(user_id, password)

    async def reset_password(self, user_id: str):
        """Trigger a password reset for the user."""
        return await self.user_auth_handler.reset_password(user_id)

    async def save_password_auth_method(
        self, user_id: str, user_flid: str, password: str
    ):
        """Persist a password auth method (including depot-side update)."""
        return await self.user_auth_handler.save_password_auth_method(
            user_id, user_flid, password
        )

    async def save_password_auth_method_no_depot(
        self, user_id: str, user_flid: str, password: str
    ):
        """Persist a password auth method without touching the code depot."""
        return await self.user_auth_handler.save_password_auth_method_no_depot(
            user_id, user_flid, password
        )
|
||||
132
backend/services/code_depot/code_depot_service.py
Normal file
132
backend/services/code_depot/code_depot_service.py
Normal file
@ -0,0 +1,132 @@
|
||||
from common.log.module_logger import ModuleLogger
|
||||
|
||||
from typing import List, Dict, Optional
|
||||
|
||||
from common.config.app_settings import app_settings
|
||||
|
||||
import httpx
|
||||
import asyncio
|
||||
|
||||
|
||||
async def fetch_with_retry(url, method="GET", retries=3, backoff=1.0, **kwargs):
    """
    A generic function for making HTTP requests with retry logic.

    Parameters:
        url (str): The endpoint URL.
        method (str): HTTP method; only 'GET' and 'POST' are supported.
        retries (int): Number of retry attempts.
        backoff (float): Base backoff time in seconds (doubled per attempt).
        kwargs: Additional arguments for the request.

    Returns:
        httpx.Response: The response object.

    Raises:
        ValueError: If `method` is not 'GET' or 'POST'.
        httpx.HTTPStatusError: On an HTTP error status (not retried).
        httpx.ReadTimeout / httpx.RequestError: If all retries fail.
    """
    verb = method.upper()
    # BUG FIX: the original silently left `response` unbound for any verb
    # other than GET/POST and crashed with UnboundLocalError at the
    # raise_for_status() call. Fail fast with a clear error instead.
    if verb not in ("GET", "POST"):
        raise ValueError(f"Unsupported HTTP method: {method}")

    for attempt in range(retries):
        try:
            async with httpx.AsyncClient(timeout=httpx.Timeout(10.0)) as client:
                if verb == "GET":
                    response = await client.get(url, **kwargs)
                else:  # POST
                    response = await client.post(url, **kwargs)
                response.raise_for_status()  # Check for HTTP errors
                return response
        except (httpx.ReadTimeout, httpx.RequestError) as exc:
            if attempt < retries - 1:
                await asyncio.sleep(backoff * (2**attempt))  # Exponential backoff
                continue
            raise exc
|
||||
|
||||
|
||||
class CodeDepotService:
    """Async HTTP client for the dev-service depot API.

    All methods delegate to fetch_with_retry and return the decoded JSON
    payload as-is; the declared return types reflect the expected payload
    shape — TODO confirm against the dev-service API.

    NOTE(review): `check_depot_name_availabe` is a typo ('available') baked
    into the public interface; renaming would break callers. `any` in the
    Dict annotations below is the builtin function, not typing.Any.
    """

    def __init__(self) -> None:
        # Base endpoint for all depot routes, normalized to one trailing '/'.
        self.depot_endpoint = (
            app_settings.DEVSVC_WEBAPI_URL_BASE.rstrip("/") + "/depot/"
        )
        self.module_logger = ModuleLogger(sender_id="CodeDepotService")

    async def check_depot_name_availabe(self, code_depot_name: str) -> bool:
        """Return whether *code_depot_name* is free to be claimed."""
        api_url = self.depot_endpoint + "check-depot-name-available/" + code_depot_name
        response = await fetch_with_retry(api_url)
        return response.json()

    async def create_code_depot(self, product_id, code_depot_name) -> Optional[str]:
        """Create a depot for a product; returns the service's JSON result."""
        api_url = self.depot_endpoint + "create-code-depot"
        response = await fetch_with_retry(
            api_url,
            method="POST",
            json={"product_id": product_id, "code_depot_name": code_depot_name},
        )
        return response.json()

    async def get_depot_ssh_url(self, code_depot_name: str) -> str:
        """Return the depot's SSH clone URL."""
        api_url = self.depot_endpoint + "get-depot-ssh-url/" + code_depot_name
        response = await fetch_with_retry(api_url)
        return response.json()

    async def get_depot_http_url(self, code_depot_name: str) -> str:
        """Return the depot's HTTP clone URL."""
        api_url = self.depot_endpoint + "get-depot-http-url/" + code_depot_name
        response = await fetch_with_retry(api_url)
        return response.json()

    async def get_depot_http_url_with_user_name(
        self, code_depot_name: str, user_name: str
    ) -> str:
        """Return the depot's HTTP clone URL with the user name embedded."""
        api_url = (
            self.depot_endpoint
            + "get-depot-http-url-with-user-name/"
            + code_depot_name
            + "/"
            + user_name
        )
        response = await fetch_with_retry(api_url)
        return response.json()

    async def get_depot_users(self, code_depot_name: str) -> List[str]:
        """List the user names with access to the depot."""
        api_url = self.depot_endpoint + "get-depot-users/" + code_depot_name
        response = await fetch_with_retry(api_url)
        return response.json()

    async def update_depot_user_password(self, user_name: str, password: str) -> bool:
        """Update a depot user's password."""
        api_url = self.depot_endpoint + "update-depot-password-for-user"
        response = await fetch_with_retry(
            api_url,
            method="POST",
            json={"user_name": user_name, "password": password},
        )
        return response.json()

    async def create_depot_user(
        self, user_name: str, password: str, email: str
    ) -> bool:
        """Create a depot-side user account."""
        api_url = self.depot_endpoint + "create-depot-user"
        response = await fetch_with_retry(
            api_url,
            method="POST",
            json={"user_name": user_name, "password": password, "email": email},
        )
        return response.json()

    async def grant_user_depot_access(
        self, user_name: str, code_depot_name: str
    ) -> bool:
        """Grant an existing depot user access to a depot."""
        api_url = self.depot_endpoint + "grant-user-depot-access"
        response = await fetch_with_retry(
            api_url,
            method="POST",
            json={"user_name": user_name, "code_depot_name": code_depot_name},
        )
        return response.json()

    async def generate_statistic_result(
        self, code_depot_name: str
    ) -> Optional[Dict[str, any]]:
        """Request depot statistics from the dev service."""
        api_url = self.depot_endpoint + "generate-statistic-result/" + code_depot_name
        response = await fetch_with_retry(api_url)
        return response.json()

    async def fetch_code_depot(self, code_depot_id: str) -> Optional[Dict[str, any]]:
        """Fetch a depot record by id."""
        api_url = self.depot_endpoint + "fetch-code-depot/" + code_depot_id
        response = await fetch_with_retry(api_url)
        return response.json()
|
||||
37
backend/services/notification/notification_service.py
Normal file
37
backend/services/notification/notification_service.py
Normal file
@ -0,0 +1,37 @@
|
||||
import httpx
|
||||
from common.config.app_settings import app_settings
|
||||
from typing import Dict, List
|
||||
|
||||
|
||||
class NotificationService:
    """Thin async client for the notification web API."""

    def __init__(self):
        # Normalize the configured base URL so the path join below yields
        # exactly one separator.
        self.notification_api_url = app_settings.NOTIFICATION_WEBAPI_URL_BASE.rstrip(
            "/"
        )

    async def send_notification(
        self,
        sender_id: str,
        channels: List[str],
        receiver_id: str,
        subject: str,
        event: str,
        properties: Dict,
    ) -> bool:
        """POST a notification event; True on HTTP 200, False otherwise."""
        payload = {
            "sender_id": sender_id,
            "channels": channels,
            "receiver_id": receiver_id,
            "subject": subject,
            "event": event,
            "properties": properties,
        }
        async with httpx.AsyncClient() as client:
            response = await client.post(
                f"{self.notification_api_url}/send_notification",
                json=payload,
            )
        # Optionally log or handle non-200 responses here.
        return response.status_code == 200
|
||||
47
backend/services/permission/permission_service.py
Normal file
47
backend/services/permission/permission_service.py
Normal file
@ -0,0 +1,47 @@
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
|
||||
from backend.infra.permission.permission_handler import PermissionHandler
|
||||
from backend.models.permission.models import PermissionDoc
|
||||
from bson import ObjectId
|
||||
|
||||
class PermissionService:
    """Service facade over PermissionHandler for permission CRUD and search."""

    def __init__(self):
        self.permission_handler = PermissionHandler()

    async def create_permission(self, permission_key: str, permission_name: str, description: Optional[str] = None) -> PermissionDoc:
        """Create a new permission document"""
        return await self.permission_handler.create_permission(permission_key, permission_name, description)

    async def update_permission(self, permission_id: str, permission_key: Optional[str] = None, permission_name: Optional[str] = None, description: Optional[str] = None) -> PermissionDoc:
        """Update an existing permission document by id"""
        return await self.permission_handler.update_permission(permission_id, permission_key, permission_name, description)

    async def create_or_update_permission(self, permission_key: str, permission_name: str, custom_permission_id: Optional[str], description: Optional[str] = None) -> PermissionDoc:
        """Create or update a permission document"""
        return await self.permission_handler.create_or_update_permission(permission_key, permission_name, custom_permission_id, description)

    async def query_permissions(self, permission_key: Optional[str] = None, permission_name: Optional[str] = None, page: int = 1, page_size: int = 10) -> Dict[str, Any]:
        """Query permissions with pagination and fuzzy search.

        Raises:
            RequestValidationError: if page or page_size is < 1.
        """
        if page < 1 or page_size < 1:
            raise RequestValidationError("page and page_size must be positive integers.")
        # 1-based page number -> number of documents to skip.
        skip = (page - 1) * page_size
        docs, total = await self.permission_handler.query_permissions(permission_key, permission_name, skip, page_size)
        return {
            "items": [doc.model_dump() for doc in docs],
            "total": total,
            "page": page,
            "page_size": page_size
        }

    async def query_permissions_no_pagination(self, permission_id: Optional[str] = None, permission_key: Optional[str] = None, permission_name: Optional[str] = None) -> Dict[str, Any]:
        """Query permissions (fuzzy search) without pagination."""
        docs, total = await self.permission_handler.query_permissions_no_pagination(permission_id, permission_key, permission_name)
        return {
            "items": [doc.model_dump() for doc in docs],
            "total": total
        }

    async def delete_permission(self, permission_id: str) -> None:
        """Delete a permission document after checking if it is referenced by any role"""
        return await self.permission_handler.delete_permission(permission_id)
|
||||
56
backend/services/permission/role_service.py
Normal file
56
backend/services/permission/role_service.py
Normal file
@ -0,0 +1,56 @@
|
||||
from typing import Optional, Dict, Any, List
|
||||
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
|
||||
from backend.infra.permission.role_handler import RoleHandler
|
||||
from backend.models.permission.models import RoleDoc
|
||||
from bson import ObjectId
|
||||
|
||||
class RoleService:
    """Service facade over RoleHandler for role CRUD, search, and
    permission assignment."""

    def __init__(self):
        self.role_handler = RoleHandler()

    async def create_role(self, role_key: str, role_name: str, role_description: Optional[str], role_level: int) -> RoleDoc:
        """Create a new role, ensuring role_key and role_name are unique and not empty"""

        doc = await self.role_handler.create_role(role_key, role_name, role_description, role_level)
        return doc

    async def update_role(self, role_id: str, role_key: str, role_name: str, role_description: Optional[str], role_level: int) -> RoleDoc:
        """Update an existing role, ensuring role_key and role_name are unique and not empty"""

        doc = await self.role_handler.update_role(role_id, role_key, role_name, role_description, role_level)
        return doc

    async def create_or_update_role(self, role_key: str, role_name: str, role_level: int, custom_role_id: Optional[str], role_description: Optional[str] = None) -> RoleDoc:
        """Create or update a role document"""
        return await self.role_handler.create_or_update_role(role_key, role_name, role_level, custom_role_id, role_description)

    async def query_roles(self, role_key: Optional[str], role_name: Optional[str], page: int = 1, page_size: int = 10) -> Dict[str, Any]:
        """Query roles with pagination and fuzzy search by role_key and role_name.

        Raises:
            RequestValidationError: if page or page_size is < 1.
        """
        if page < 1 or page_size < 1:
            raise RequestValidationError("page and page_size must be positive integers.")
        # 1-based page number -> number of documents to skip.
        skip = (page - 1) * page_size
        docs, total = await self.role_handler.query_roles(role_key, role_name, skip, page_size)
        return {
            "items": [doc.model_dump() for doc in docs],
            "total": total,
            "page": page,
            "page_size": page_size
        }

    async def query_roles_no_pagination(self, role_id: Optional[str] = None, role_key: Optional[str] = None, role_name: Optional[str] = None) -> Dict[str, Any]:
        """Query roles fuzzy search without pagination"""
        docs, total = await self.role_handler.query_roles_no_pagination(role_id, role_key, role_name)
        return {
            "items": [doc.model_dump() for doc in docs],
            "total": total
        }

    async def assign_permissions_to_role(self, role_id: str, permission_ids: List[str]) -> RoleDoc:
        """Assign permissions to a role by updating the permission_ids field"""
        return await self.role_handler.assign_permissions_to_role(role_id, permission_ids)

    async def delete_role(self, role_id: str) -> None:
        """Delete a role document after checking if it is referenced by any user"""
        return await self.role_handler.delete_role(role_id)
|
||||
117
backend/services/user/user_management_service.py
Normal file
117
backend/services/user/user_management_service.py
Normal file
@ -0,0 +1,117 @@
|
||||
from backend.models.permission.models import UserRoleDoc
|
||||
from common.log.module_logger import ModuleLogger
|
||||
from typing import Optional, List, Tuple
|
||||
|
||||
from backend.models.user.constants import (
|
||||
NewUserMethod,
|
||||
UserAccountProperty,
|
||||
)
|
||||
from backend.models.user.models import UserAccountDoc
|
||||
from backend.models.permission.constants import (
|
||||
AdministrativeRole,
|
||||
Capability,
|
||||
)
|
||||
from backend.infra.auth.user_auth_handler import (
|
||||
UserAuthHandler,
|
||||
)
|
||||
from backend.infra.user_profile.user_profile_handler import (
|
||||
UserProfileHandler,
|
||||
)
|
||||
from backend.infra.permission.user_role_handler import (
|
||||
UserRoleHandler,
|
||||
)
|
||||
from common.log.log_utils import log_entry_exit_async
|
||||
from common.constants.region import UserRegion
|
||||
|
||||
|
||||
class UserManagementService:
    """Orchestrates user account creation, new-user data initialization, and
    role assignment across the auth, profile, and role handlers."""

    def __init__(self) -> None:
        self.user_auth_handler = UserAuthHandler()
        self.user_profile_handler = UserProfileHandler()
        self.user_role_handler = UserRoleHandler()
        # BUG FIX: the original passed the class object itself as sender_id;
        # sibling services (e.g. CodeDepotService) pass their name as a string.
        self.module_logger = ModuleLogger(sender_id="UserManagementService")

    @log_entry_exit_async
    async def create_new_user_account(
        self, method: NewUserMethod, region: UserRegion
    ) -> UserAccountDoc:
        """create a new user account document in DB

        Args:
            method (NewUserMethod): the method the new user came from
            region: preferred user region detected via the user log-in website

        Returns:
            UserAccountDoc: the newly created user account document

        Raises:
            ValueError: if *method* is not EMAIL or MOBILE (the original fell
                through and crashed with UnboundLocalError on return).
        """
        if NewUserMethod.EMAIL == method:
            user_account = await self.user_profile_handler.create_new_user_account(
                UserAccountProperty.EMAIL_VERIFIED,
                Capability.VISITOR,
                AdministrativeRole.PERSONAL,
                region,
            )
        elif NewUserMethod.MOBILE == method:
            # NOTE(review): this branch also passes EMAIL_VERIFIED — it looks
            # like a mobile-verified property was intended; confirm against
            # UserAccountProperty before changing behavior.
            user_account = await self.user_profile_handler.create_new_user_account(
                UserAccountProperty.EMAIL_VERIFIED,
                Capability.VISITOR,
                AdministrativeRole.PERSONAL,
                region,
            )
        else:
            raise ValueError(f"Unsupported new-user method: {method}")

        # Create other doc in collections for the new user
        # TODO: Should convert to notification
        # await UserAchievement(str(user_account.id)).create_activeness_achievement()
        return user_account

    async def initialize_new_user_data(
        self,
        user_id: str,
        method: NewUserMethod,
        email_address: str = None,
        mobile_number: str = None,
        region: UserRegion = UserRegion.ZH_CN,
        time_zone: Optional[str] = "UTC",
    ):
        """Init data for the new user

        Args:
            user_id (str): user id
            method (NewUserMethod): the method the new user came from
            email_address: required when method is EMAIL
            mobile_number: required when method is MOBILE

        Returns:
            result: True if data was initialized successfully, else False
        """

        # create basic and provider profile doc for the new user
        if NewUserMethod.EMAIL == method:
            await self.user_profile_handler.create_basic_profile(
                user_id, email_address, True, None, False, False, region, time_zone
            )
            await self.user_auth_handler.save_email_auth_method(user_id, email_address)
        elif NewUserMethod.MOBILE == method:
            await self.user_profile_handler.create_basic_profile(
                user_id, None, False, mobile_number, True, False, region, time_zone
            )
        else:
            # Unknown sign-up method: nothing to initialize.
            return False

        await self.user_profile_handler.create_provider_profile(user_id)
        return True

    async def get_account_by_id(self, user_id: str) -> UserAccountDoc:
        """Fetch the user's account document by id."""
        return await self.user_profile_handler.get_account_by_id(user_id)

    async def assign_roles_to_user(self, user_id: str, role_ids: List[str]) -> UserRoleDoc:
        """Assign roles to a user by updating or creating the UserRoleDoc"""
        return await self.user_role_handler.assign_roles_to_user(user_id, role_ids)

    async def get_role_and_permission_by_user_id(self, user_id: str) -> Tuple[List[str], List[str]]:
        """Get user role names and permission keys by user id

        Args:
            user_id (str): user id

        Returns:
            Tuple[List[str], List[str]]: user role names and permission keys
        """
        return await self.user_role_handler.get_role_and_permission_by_user_id(user_id)
|
||||
0
common/__init__.py
Normal file
0
common/__init__.py
Normal file
0
common/config/__init__.py
Normal file
0
common/config/__init__.py
Normal file
38
common/config/app_settings.py
Normal file
38
common/config/app_settings.py
Normal file
@ -0,0 +1,38 @@
|
||||
import os
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
|
||||
class AppSettings(BaseSettings):
    """Application settings loaded from the environment / .myapp.env."""

    NAME: str = "authentication"
    APP_NAME: str = NAME
    APP_ENV: str = os.environ.get("APP_ENV", "alpha")

    # Feature toggles for metrics export and health/readiness probes.
    METRICS_ENABLED: bool = False
    PROBES_ENABLED: bool = True

    # JWT signing configuration; the secret MUST be supplied via environment.
    JWT_SECRET_KEY: str = ""
    JWT_ALGORITHM: str = "HS256"

    # NOTE(review): 3600 minutes is 60 hours — confirm this is intended and
    # not a seconds value placed in a minutes field.
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 3600
    REFRESH_TOKEN_EXPIRE_DAYS: int = 1

    # Downstream service base URLs (dev service and notification service).
    DEVSVC_WEBAPI_URL_BASE: str = "http://localhost:8007/api/devsvc/"
    NOTIFICATION_WEBAPI_URL_BASE: str = "http://localhost:8003/api/notification/"

    AUTH_SERVICE_ENDPOINT: str = ""

    # MongoDB connection settings.
    MONGODB_URI: str = ""
    MONGODB_NAME: str = ""
    TENANT_CACHE_MAX: int = 64
    SYSTEM_USER_ID: str = "117f191e810c19729de860aa"

    # Logging file locations; derived names share the app name prefix.
    LOG_BASE_PATH: str = "./log"
    BACKEND_LOG_FILE_NAME: str = APP_NAME
    APPLICATION_ACTIVITY_LOG: str = APP_NAME + "-application-activity"

    class Config:
        # pydantic-settings env-file configuration.
        env_file = ".myapp.env"
        env_file_encoding = "utf-8"


# Module-level singleton used throughout the codebase.
app_settings = AppSettings()
|
||||
17
common/config/log_settings.py
Normal file
17
common/config/log_settings.py
Normal file
@ -0,0 +1,17 @@
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from .app_settings import app_settings
|
||||
|
||||
|
||||
@dataclass
class LogSettings:
    """Logging configuration derived from app settings and the environment."""

    LOG_PATH_BASE: str = app_settings.LOG_BASE_PATH
    LOG_RETENTION: str = os.environ.get("LOG_RETENTION", "30 days")
    LOG_ROTATION: str = os.environ.get("LOG_ROTATION", "00:00")  # midnight
    MAX_BACKUP_FILES: int = int(os.environ.get("LOG_BACKUP_FILES", 5))
    LOG_ROTATION_BYTES: int = int(os.environ.get("LOG_ROTATION_BYTES", 10 * 1024 * 1024))  # 10 MB
    APP_NAME: str = app_settings.APP_NAME
    ENVIRONMENT: str = app_settings.APP_ENV


# Module-level singleton; env vars are read once at import time.
log_settings = LogSettings()
|
||||
0
common/constants/__init__.py
Normal file
0
common/constants/__init__.py
Normal file
2
common/constants/jwt_constants.py
Normal file
2
common/constants/jwt_constants.py
Normal file
@ -0,0 +1,2 @@
|
||||
# Keys for role/permission data carried in JWT payloads — presumably claim
# names; confirm against the token issuer.
USER_ROLE_NAMES = "role_names"
USER_PERMISSIONS = "user_permissions"
|
||||
5
common/constants/region.py
Normal file
5
common/constants/region.py
Normal file
@ -0,0 +1,5 @@
|
||||
from enum import IntEnum
|
||||
|
||||
class UserRegion(IntEnum):
    """Coarse user region used for localization defaults."""

    OTHER = 0
    ZH_CN = 1  # mainland China (zh-CN)
|
||||
0
common/exception/__init__.py
Normal file
0
common/exception/__init__.py
Normal file
23
common/exception/exceptions.py
Normal file
23
common/exception/exceptions.py
Normal file
@ -0,0 +1,23 @@
|
||||
class DoesNotExistError(Exception):
    """Raised when a requested entity cannot be found."""

    def __init__(self, message: str = "Does Not Exist"):
        # BUG FIX: call Exception.__init__ so str(exc) and exc.args carry the
        # message; the original only set self.message, leaving str(exc) empty.
        super().__init__(message)
        self.message = message


class AuthenticationError(Exception):
    """Raised when a caller's identity cannot be verified (HTTP 401)."""

    def __init__(self, message: str = "Unauthorized"):
        super().__init__(message)
        self.message = message


class AuthorizationError(Exception):
    """Raised when an authenticated caller lacks permission (HTTP 403)."""

    def __init__(self, message: str = "Forbidden"):
        super().__init__(message)
        self.message = message


class InvalidOperationError(Exception):
    """Raised when an operation is not valid in the current state."""

    def __init__(self, message: str = "Invalid Operation"):
        super().__init__(message)
        self.message = message


class InvalidDataError(Exception):
    """Raised when supplied data fails validation."""

    def __init__(self, message: str = "Invalid Data"):
        super().__init__(message)
        self.message = message
|
||||
0
common/log/__init__.py
Normal file
0
common/log/__init__.py
Normal file
12
common/log/application_logger.py
Normal file
12
common/log/application_logger.py
Normal file
@ -0,0 +1,12 @@
|
||||
from .base_logger import LoggerBase
|
||||
from common.config.app_settings import app_settings
|
||||
|
||||
class ApplicationLogger(LoggerBase):
    """Logger for application-activity events, bound to the
    application-activity topic configured in app settings."""

    def __init__(self, application_activities: dict = None) -> None:
        """
        Args:
            application_activities: optional extra fields merged into every
                record. BUG FIX: the original used a mutable `{}` default —
                a shared-object anti-pattern; None is used instead (behavior
                unchanged, since the dict was only copied, never mutated).
        """
        # Name kept as 'extra_fileds' to match LoggerBase's keyword argument.
        extra_fileds = {}
        if application_activities:
            extra_fileds.update(application_activities)
        super().__init__(
            logger_name=app_settings.APPLICATION_ACTIVITY_LOG,
            extra_fileds=extra_fileds,
        )
|
||||
136
common/log/base_logger.py
Normal file
136
common/log/base_logger.py
Normal file
@ -0,0 +1,136 @@
|
||||
from loguru import logger as guru_logger
|
||||
from common.config.log_settings import log_settings
|
||||
from typing import Dict, Any, Optional
|
||||
import socket
|
||||
import json
|
||||
import threading
|
||||
import os
|
||||
import sys
|
||||
import inspect
|
||||
import logging
|
||||
|
||||
from common.log.json_sink import JsonSink
|
||||
|
||||
class LoggerBase:
|
||||
binded_loggers = {}
|
||||
logger_lock = threading.Lock()
|
||||
|
||||
def __init__(self, logger_name: str, extra_fileds: dict[str, any]) -> None:
|
||||
self.__logger_name = logger_name
|
||||
self.extra_fileds = extra_fileds
|
||||
with LoggerBase.logger_lock:
|
||||
if self.__logger_name in LoggerBase.binded_loggers:
|
||||
self.logger = LoggerBase.binded_loggers[self.__logger_name]
|
||||
return
|
||||
|
||||
log_filename = f"{log_settings.LOG_PATH_BASE}/{self.__logger_name}.log"
|
||||
log_level = "INFO"
|
||||
rotation_bytes = int(log_settings.LOG_ROTATION_BYTES or 10 * 1024 * 1024)
|
||||
|
||||
guru_logger.remove()
|
||||
|
||||
file_sink = JsonSink(
|
||||
log_file_path=log_filename,
|
||||
rotation_size_bytes=rotation_bytes,
|
||||
max_backup_files=log_settings.MAX_BACKUP_FILES
|
||||
)
|
||||
guru_logger.add(
|
||||
sink=file_sink,
|
||||
level=log_level,
|
||||
filter=lambda record: record["extra"].get("topic") == self.__logger_name,
|
||||
)
|
||||
|
||||
guru_logger.add(
|
||||
sink=sys.stderr,
|
||||
level=log_level,
|
||||
format="{level} - {time:YYYY-MM-DD HH:mm:ss} - <{extra[log_file]}:{extra[log_line]}> - {extra[properties_str]} - {message}",
|
||||
filter=lambda record: record["extra"].get("topic") == self.__logger_name,
|
||||
)
|
||||
|
||||
host_name = socket.gethostname()
|
||||
host_ip = socket.gethostbyname(host_name)
|
||||
self.logger = guru_logger.bind(
|
||||
topic=self.__logger_name,
|
||||
host_ip=host_ip,
|
||||
host_name=host_name,
|
||||
app=log_settings.APP_NAME,
|
||||
env=log_settings.ENVIRONMENT,
|
||||
)
|
||||
with LoggerBase.logger_lock:
|
||||
LoggerBase.binded_loggers[self.__logger_name] = self.logger
|
||||
|
||||
def _get_log_context(self) -> dict:
|
||||
frame = inspect.currentframe().f_back.f_back
|
||||
filename = os.path.basename(frame.f_code.co_filename)
|
||||
lineno = frame.f_lineno
|
||||
return {"log_file": filename, "log_line": lineno}
|
||||
|
||||
def _prepare_properties(self, properties: Optional[Dict[str, Any]]) -> Dict[str, Any]:
|
||||
props = {} if properties is None else properties.copy()
|
||||
props_str = json.dumps(props, ensure_ascii=False) if props else "{}"
|
||||
return props, props_str
|
||||
|
||||
async def log_event(self, sender_id: str, receiver_id: str, subject: str, event: str, properties: dict[str, any], text: str = "") -> None:
|
||||
props, props_str = self._prepare_properties(properties)
|
||||
context = self._get_log_context()
|
||||
local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event=event, properties=props, properties_str=props_str, **context)
|
||||
local_logger.info(text)
|
||||
|
||||
async def log_exception(self, sender_id: str, receiver_id: str, subject: str, exception: Exception, text: str = "", properties: dict[str, any] = None) -> None:
|
||||
props, props_str = self._prepare_properties(properties)
|
||||
context = self._get_log_context()
|
||||
local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="exception", properties=props, properties_str=props_str, exception=exception, **context)
|
||||
local_logger.exception(text)
|
||||
|
||||
async def log_info(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None:
|
||||
props, props_str = self._prepare_properties(properties)
|
||||
context = self._get_log_context()
|
||||
local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="information", properties=props, properties_str=props_str, **context)
|
||||
local_logger.info(text)
|
||||
|
||||
async def log_warning(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None:
|
||||
props, props_str = self._prepare_properties(properties)
|
||||
context = self._get_log_context()
|
||||
local_logger = self.logger.bind(sender_id=sender_id, receiver_id=receiver_id, subject=subject, event="warning", properties=props, properties_str=props_str, **context)
|
||||
local_logger.warning(text)
|
||||
|
||||
async def log_error(self, sender_id: str, receiver_id: str, subject: str, text: str = "", properties: dict[str, any] = None) -> None:
    """Emit an error record (event="error") at ERROR level."""
    props, props_str = self._prepare_properties(properties)
    routing = {
        "sender_id": sender_id,
        "receiver_id": receiver_id,
        "subject": subject,
        "event": "error",
        "properties": props,
        "properties_str": props_str,
    }
    context = self._get_log_context()
    self.logger.bind(**routing, **context).error(text)
|
||||
|
||||
@staticmethod
def configure_uvicorn_logging():
    """Route stdlib logging records (uvicorn, fastapi) through loguru.

    Installs an InterceptHandler as the only root handler and clears the
    handlers of the uvicorn/fastapi loggers so their records propagate to
    the root and are re-emitted via `guru_logger`.
    """
    print("📢 Setting up uvicorn logging interception...")

    # Intercept logs from these loggers
    intercept_loggers = ["uvicorn", "uvicorn.access", "uvicorn.error", "fastapi"]

    class InterceptHandler(logging.Handler):
        # Bridge handler: re-emits every stdlib LogRecord via loguru.
        def emit(self, record):
            # Use the loguru level of the same name when one exists;
            # otherwise fall back to the numeric stdlib level.
            # NOTE(review): passing a second argument to guru_logger.level()
            # assumes a loguru version that supports it — confirm.
            level = (
                guru_logger.level(record.levelname).name
                if guru_logger.level(record.levelname, None)
                else record.levelno
            )
            # Walk up past the logging module's own frames so loguru reports
            # the original call site, not this handler.
            frame, depth = logging.currentframe(), 2
            while frame.f_code.co_filename == logging.__file__:
                frame = frame.f_back
                depth += 1

            guru_logger.opt(depth=depth, exception=record.exc_info).log(
                level,
                f"[{record.name}] {record.getMessage()}",
            )

    # Replace default handlers on the root logger with the bridge.
    logging.root.handlers.clear()
    logging.root.setLevel(logging.INFO)
    logging.root.handlers = [InterceptHandler()]

    # Configure specific uvicorn loggers
    for logger_name in intercept_loggers:
        logging_logger = logging.getLogger(logger_name)
        logging_logger.handlers.clear()  # Remove default handlers
        logging_logger.propagate = True  # Ensure propagation through Loguru
|
||||
25
common/log/business_metric_logger.py
Normal file
25
common/log/business_metric_logger.py
Normal file
@ -0,0 +1,25 @@
|
||||
from .base_logger import LoggerBase
|
||||
from common.config.app_settings import app_settings
|
||||
import json
|
||||
|
||||
|
||||
class BusinessMetricLogger(LoggerBase):
    """Logger dedicated to business-metric records.

    Writes to the log stream named by ``app_settings.BUSINESS_METRIC_LOG``,
    with any constructor-supplied metrics bound as extra fields.
    """

    def __init__(self, business_metrics: dict[str, any] = None) -> None:
        """
        Args:
            business_metrics: optional metric fields bound to every record.

        Bug fix: the original used a mutable default argument (``{}``),
        which is shared across all calls; ``None`` is used as the sentinel.
        """
        extra_fileds = {}  # spelling kept as-is: LoggerBase expects `extra_fileds`
        if business_metrics:
            extra_fileds.update(business_metrics)
        super().__init__(
            logger_name=app_settings.BUSINESS_METRIC_LOG,
            extra_fileds=extra_fileds,
        )

    async def log_metrics(self, business_metrics: dict[str, any] = None) -> None:
        """Record one business-metric event with *business_metrics* as properties."""
        # `or {}` preserves the original behavior for callers passing nothing.
        return await super().log_event(
            sender_id="business_metric_manager",
            receiver_id="business_metric_logger",
            subject="metrics",
            event="logging",
            properties=business_metrics or {},
            text="business metric logged"
        )
|
||||
50
common/log/function_logger.py
Normal file
50
common/log/function_logger.py
Normal file
@ -0,0 +1,50 @@
|
||||
from .application_logger import ApplicationLogger
|
||||
|
||||
|
||||
class FunctionLogger(ApplicationLogger):
    """Application logger specialised for function lifecycle events
    (enter / exit / exception), published under the "function" subject."""

    def __init__(self, sender_id: str, receiver_id: str) -> None:
        super().__init__()
        self.event_sender_id = sender_id
        self.event_receiver_id = receiver_id
        self.event_subject = "function"

    def _routing(self) -> dict:
        """Sender/receiver/subject keywords shared by every event."""
        return {
            "sender_id": self.event_sender_id,
            "receiver_id": self.event_receiver_id,
            "subject": self.event_subject,
        }

    async def log_enter(self, function: str, file: str):
        """Record that *function* defined in *file* was entered."""
        return await super().log_event(
            event="enter",
            properties={"function": function, "file": file},
            text="Enter:{} of {}".format(function, file),
            **self._routing(),
        )

    async def log_exit(self, function: str, file: str, excution_time_in_ns: int):
        """Record a normal exit with the measured execution time.

        NOTE: the parameter spelling "excution_time_in_ns" is kept because
        callers pass it by keyword.
        """
        return await super().log_event(
            event="exit",
            properties={
                "function": function,
                "file": file,
                "excution_time_in_ns": excution_time_in_ns,
            },
            text="Exit:{} of {}".format(function, file),
            **self._routing(),
        )

    async def log_exception(self, exception: Exception, function: str, file: str, excution_time_in_ns: int) -> None:
        """Record *exception* raised by *function*, with execution time."""
        return await super().log_exception(
            exception=exception,
            text="Exception:{} of {}".format(function, file),
            properties={
                "function": function,
                "file": file,
                "excution_time_in_ns": excution_time_in_ns,
            },
            **self._routing(),
        )
|
||||
85
common/log/json_sink.py
Normal file
85
common/log/json_sink.py
Normal file
@ -0,0 +1,85 @@
|
||||
import json
|
||||
import datetime
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
class JsonSink:
    """Loguru sink writing one JSON object per line to a file.

    Performs size-based rotation: once the active file reaches
    ``rotation_size_bytes`` it is renamed with a timestamp suffix and a
    fresh file is opened; only the newest ``max_backup_files`` rotated
    files are kept.
    """

    def __init__(
        self,
        log_file_path: str,
        rotation_size_bytes: int = 10 * 1024 * 1024,
        max_backup_files: int = 5,
    ):
        self.log_file_path = Path(log_file_path)
        self.rotation_size = rotation_size_bytes
        self.max_backup_files = max_backup_files
        self._open_log_file()

    def _open_log_file(self):
        """Open the active log file for appending, creating parent dirs."""
        # mkdir with exist_ok is idempotent, so no existence check is needed.
        self.log_file_path.parent.mkdir(parents=True, exist_ok=True)
        self.log_file = self.log_file_path.open("a", encoding="utf-8")

    def _should_rotate(self) -> bool:
        """True when the active file exists and has reached the size limit."""
        path = self.log_file_path
        return path.exists() and path.stat().st_size >= self.rotation_size

    def _rotate(self):
        """Close and timestamp-rename the current file, prune backups, reopen."""
        self.log_file.close()
        stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        target = self.log_file_path.with_name(
            f"{self.log_file_path.stem}_{stamp}{self.log_file_path.suffix}"
        )
        self.log_file_path.rename(target)
        self._cleanup_old_backups()
        self._open_log_file()

    def _cleanup_old_backups(self):
        """Delete all but the newest ``max_backup_files`` rotated files."""
        pattern = f"{self.log_file_path.stem}_*{self.log_file_path.suffix}"
        backups = sorted(
            self.log_file_path.parent.glob(pattern),
            key=lambda p: p.stat().st_mtime,
            reverse=True,
        )
        for old_file in backups[self.max_backup_files:]:
            try:
                old_file.unlink()
            except Exception as e:
                print(f"Failed to delete old backup {old_file}: {e}")

    def _build_entry(self, record) -> dict:
        """Map a loguru record onto this sink's JSON schema."""
        extra = record["extra"]
        context_keys = (
            "app", "env", "log_file", "log_line", "topic", "sender_id",
            "receiver_id", "subject", "event", "host_ip", "host_name",
        )
        entry = {
            "level": record["level"].name.lower(),
            "timestamp": int(record["time"].timestamp() * 1000),
            "text": record["message"],
            "fields": extra.get("properties", {}),
            "context": {key: extra.get(key) for key in context_keys},
            "stacktrace": None,
        }
        if record["exception"]:
            exc_type, exc_value, exc_tb = record["exception"]
            entry["stacktrace"] = traceback.format_exception(exc_type, exc_value, exc_tb)
        return entry

    def __call__(self, message):
        """Write one loguru *message* as a JSON line, rotating first if needed."""
        record = message.record
        if self._should_rotate():
            self._rotate()
        entry = self._build_entry(record)
        self.log_file.write(json.dumps(entry, ensure_ascii=False, default=str) + "\n")
        self.log_file.flush()
|
||||
25
common/log/log_utils.py
Normal file
25
common/log/log_utils.py
Normal file
@ -0,0 +1,25 @@
|
||||
import os
|
||||
from .function_logger import FunctionLogger
|
||||
import time
|
||||
import functools
|
||||
|
||||
|
||||
def log_entry_exit_async(func):
    """Decorator for async functions that logs entry, exit (with elapsed
    time in ns), and any raised exception via FunctionLogger.

    NOTE(review): timing uses time.process_time_ns(), which measures CPU
    time and excludes time spent awaiting — confirm this is intended for
    async code (perf_counter_ns would measure wall-clock time).
    """
    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        source_file = os.path.relpath(func.__code__.co_filename)
        fn_logger = FunctionLogger(sender_id="log_entry_exit_async", receiver_id="function_logger")
        started = time.process_time_ns()
        try:
            await fn_logger.log_enter(func.__name__, source_file)
            result = await func(*args, **kwargs)
            await fn_logger.log_exit(func.__name__, source_file, time.process_time_ns() - started)
            return result
        except Exception as exception:
            await fn_logger.log_exception(
                exception=exception,
                function=func.__name__,
                file=source_file,
                excution_time_in_ns=time.process_time_ns() - started)
            raise
    return wrapper
|
||||
46
common/log/module_logger.py
Normal file
46
common/log/module_logger.py
Normal file
@ -0,0 +1,46 @@
|
||||
from .application_logger import ApplicationLogger
|
||||
|
||||
|
||||
class ModuleLogger(ApplicationLogger):
    """Application logger pre-bound to a single module.

    Every record carries the module's sender id, the fixed receiver
    "ModuleLogger", and the "module" subject.
    """

    def __init__(self, sender_id: str) -> None:
        super().__init__()
        self.event_sender_id = sender_id
        self.event_receiver_id = "ModuleLogger"
        self.event_subject = "module"

    def _routing(self) -> dict:
        """Sender/receiver/subject keywords shared by every method below."""
        return {
            "sender_id": self.event_sender_id,
            "receiver_id": self.event_receiver_id,
            "subject": self.event_subject,
        }

    async def log_exception(self, exception: Exception, text: str = "Exception", properties: dict[str, any] = None) -> None:
        """Log *exception* with an optional message and structured properties."""
        return await super().log_exception(
            exception=exception, text=text, properties=properties, **self._routing()
        )

    async def log_info(self, info: str, properties: dict[str, any] = None) -> None:
        """Log an informational message for this module."""
        return await super().log_info(text=info, properties=properties, **self._routing())

    async def log_warning(self, warning: str, properties: dict[str, any] = None) -> None:
        """Log a warning message for this module."""
        return await super().log_warning(text=warning, properties=properties, **self._routing())

    async def log_error(self, error: str, properties: dict[str, any] = None) -> None:
        """Log an error message for this module."""
        return await super().log_error(text=error, properties=properties, **self._routing())
|
||||
14
common/log/user_logger.py
Normal file
14
common/log/user_logger.py
Normal file
@ -0,0 +1,14 @@
|
||||
from .base_logger import LoggerBase
|
||||
from common.config.app_settings import app_settings
|
||||
|
||||
import json
|
||||
|
||||
|
||||
class UserLogger(LoggerBase):
    """Logger dedicated to user-activity records.

    Writes to the log stream named by ``app_settings.USER_ACTIVITY_LOG``,
    with any constructor-supplied activity fields bound as extra fields.
    """

    def __init__(self, user_activities: dict[str, any] = None) -> None:
        """
        Args:
            user_activities: optional fields bound to every record.

        Bug fix: the original used a mutable default argument (``{}``),
        which is shared across all calls; ``None`` is used as the sentinel.
        """
        extra_fileds = {}  # spelling kept as-is: LoggerBase expects `extra_fileds`
        if user_activities:
            extra_fileds.update(user_activities)
        super().__init__(
            logger_name=app_settings.USER_ACTIVITY_LOG, extra_fileds=extra_fileds
        )
|
||||
140
common/probes/__init__.py
Normal file
140
common/probes/__init__.py
Normal file
@ -0,0 +1,140 @@
|
||||
import logging
|
||||
from enum import Enum
|
||||
from typing import Optional, Callable, Tuple, Dict
|
||||
import inspect
|
||||
from datetime import datetime, timezone
|
||||
|
||||
# ProbeType is an Enum that defines the types of probes that can be registered.
|
||||
class ProbeType(Enum):
    """Categories of health probes supported by the probe framework."""
    LIVENESS = "liveness"    # is the process alive?
    READINESS = "readiness"  # is it ready to serve traffic?
    STARTUP = "startup"      # has startup completed?
|
||||
|
||||
# ProbeResult is a class that represents the result of a probe check.
|
||||
class ProbeResult:
    """Outcome of a single probe check: a success flag, a human-readable
    message, and optional structured detail data."""

    def __init__(self, success: bool, message: str = "ok", data: Optional[dict] = None):
        self.success = success
        self.message = message
        # Falsy data (None or {}) is normalised to a fresh empty dict.
        self.data = data if data else {}

    def to_dict(self) -> dict:
        """Serialise the result for inclusion in a JSON response."""
        return dict(success=self.success, message=self.message, data=self.data)
|
||||
|
||||
# Probe is a class that represents a probe that can be registered.
|
||||
class Probe:
    """A single registered health check bound to an HTTP path."""

    def __init__(self, type: ProbeType, path: str, check_fn: Callable, name: Optional[str] = None):
        self.type = type
        self.path = path
        self.check_fn = check_fn
        # Auto-generate a unique name when none (or an empty one) is given.
        self.name = name or f"{type.value}-{id(self)}"

    async def execute(self) -> ProbeResult:
        """Run the check and normalise its outcome into a ProbeResult.

        The check may be sync or async and may return a ProbeResult, a bool,
        or anything else (treated as success). Any exception is converted
        into a failed result rather than propagated.
        """
        try:
            outcome = self.check_fn()
            if inspect.isawaitable(outcome):
                outcome = await outcome
        except Exception as exc:
            return ProbeResult(False, str(exc))
        if isinstance(outcome, ProbeResult):
            return outcome
        if isinstance(outcome, bool):
            return ProbeResult(outcome, "ok" if outcome else "failed")
        return ProbeResult(True, "ok")
|
||||
|
||||
# ProbeGroup is a class that represents a group of probes that can be checked together.
|
||||
class ProbeGroup:
    """All probes registered under one HTTP path, checked as a unit."""

    def __init__(self, path: str):
        self.path = path
        self.probes: Dict[str, Probe] = {}

    def add_probe(self, probe: Probe):
        """Register (or replace) a probe under its name."""
        self.probes[probe.name] = probe

    async def check_all(self) -> Tuple[bool, dict]:
        """Execute every probe; return (overall_success, per-probe results)."""
        outcomes = {}
        overall = True
        for probe_name, probe in self.probes.items():
            outcome = await probe.execute()
            outcomes[probe_name] = outcome.to_dict()
            overall = overall and outcome.success
        return overall, outcomes
|
||||
|
||||
# FrameworkAdapter is an abstract class that defines the interface for framework-specific probe adapters.
|
||||
class FrameworkAdapter:
    """Abstract bridge between the probe framework and a web framework.

    Subclasses implement register_route() for their framework; the shared
    handle_request() turns a ProbeGroup check into a JSON-serialisable
    payload plus an HTTP status (200 when all probes pass, else 503).
    """

    async def handle_request(self, group: ProbeGroup):
        """Check all probes in *group* and build the response body/status."""
        overall, outcomes = await group.check_all()
        http_status = 200 if overall else 503
        body = {
            "status": "ok" if overall else "failed",
            "payload": outcomes,
            "timestamp": int(datetime.now(timezone.utc).timestamp()),
        }
        return body, http_status

    def register_route(self, path: str, handler: Callable):
        """Install *handler* at *path*; must be implemented by subclasses."""
        raise NotImplementedError
|
||||
|
||||
# ProbeManager is a class that manages the registration of probes and their corresponding framework adapters.
|
||||
class ProbeManager:
    """Registry that wires probes (health checks) to framework routes.

    Probes are grouped by HTTP path; for every registered framework adapter
    a route is installed that runs all probes in the group on request.
    """

    # Default route per probe type, used when no explicit path is given.
    _default_paths = {
        ProbeType.LIVENESS: "/_/livez",
        ProbeType.READINESS: "/_/readyz",
        ProbeType.STARTUP: "/_/healthz"
    }

    def __init__(self):
        # path -> ProbeGroup of every probe mounted at that path
        self.groups: Dict[str, ProbeGroup] = {}
        # framework name -> adapter that knows how to install routes
        self.adapters: Dict[str, FrameworkAdapter] = {}
        # Flipped by mark_startup_complete(); read by the default startup check.
        self._startup_complete = False

    def register_adapter(self, framework: str, adapter: FrameworkAdapter):
        """Make *adapter* available for routes registered under *framework*."""
        self.adapters[framework] = adapter
        logging.info(f"Registered probe adapter ({adapter}) for framework: {framework}")

    def register(
        self,
        type: ProbeType,
        check_func: Optional[Callable] = None,
        path: Optional[str] = None,
        prefix: str = "",
        name: Optional[str] = None,
        frameworks: Optional[list] = None
    ):
        """Register a probe and expose it on each requested framework.

        Args:
            type: probe category (liveness/readiness/startup).
            check_func: sync or async callable performing the check; defaults
                to an always-True check, or to the built-in startup check for
                STARTUP probes.
            path: route path; falls back to the per-type default.
            prefix: optional path prefix prepended to the route.
            name: optional probe name; auto-generated when omitted.
            frameworks: adapter names to install the route on
                (defaults to ["default"]).
        """
        path = path or self._default_paths.get(type, "/_/healthz")
        if prefix:
            path = f"{prefix}{path}"

        # STARTUP probes default to "has mark_startup_complete() been called?".
        if type == ProbeType.STARTUP and check_func is None:
            check_func = self._default_startup_check

        probe = Probe(type, path, check_func or (lambda: True), name)

        if path not in self.groups:
            self.groups[path] = ProbeGroup(path)
        self.groups[path].add_probe(probe)

        for framework in (frameworks or ["default"]):
            self._register_route(framework, path)
            logging.info(f"Registered {type.value} probe route ({path}) for framework: {framework}")

    def _register_route(self, framework: str, path: str):
        # Silently skip frameworks that have no registered adapter.
        if framework not in self.adapters:
            return

        adapter = self.adapters[framework]
        group = self.groups[path]

        # Closure over the group so the handler always sees the current probes.
        async def handler():
            return await adapter.handle_request(group)

        adapter.register_route(path, handler)

    def _default_startup_check(self) -> bool:
        """Built-in startup probe: True once startup has been marked complete."""
        return self._startup_complete

    def mark_startup_complete(self):
        """Signal that application startup finished (startup probe passes)."""
        self._startup_complete = True
|
||||
15
common/probes/adapters.py
Normal file
15
common/probes/adapters.py
Normal file
@ -0,0 +1,15 @@
|
||||
from . import FrameworkAdapter
|
||||
from fastapi.responses import JSONResponse
|
||||
from typing import Callable
|
||||
|
||||
# FastAPIAdapter is a class that implements the FrameworkAdapter interface for FastAPI.
|
||||
class FastAPIAdapter(FrameworkAdapter):
    """Installs probe handlers as GET routes on a FastAPI application."""

    def __init__(self, app):
        self.app = app

    def register_route(self, path: str, handler: Callable):
        """Mount *handler* at *path*; the handler returns (payload, status)."""
        async def probe_endpoint():
            payload, status = await handler()
            return JSONResponse(content=payload, status_code=status)

        self.app.add_api_route(path, probe_endpoint, methods=["GET"])
|
||||
130
common/token/token_manager.py
Normal file
130
common/token/token_manager.py
Normal file
@ -0,0 +1,130 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
import uuid
|
||||
from typing import Dict, List
|
||||
from jose import jwt, JWTError
|
||||
from common.config.app_settings import app_settings
|
||||
from fastapi import Depends, HTTPException
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
|
||||
from common.constants.jwt_constants import USER_ROLE_NAMES, USER_PERMISSIONS
|
||||
|
||||
|
||||
class CurrentUser:
    """Authenticated principal extracted from a decoded access token."""

    def __init__(self, user_id: str, user_role_names: List[str], user_permission_keys: List[str]):
        self.user_id = user_id
        self.user_role_names = user_role_names
        self.user_permission_keys = user_permission_keys

    def has_all_permissions(self, permissions: List[str]) -> bool:
        """Check if the user has all the specified permissions.

        An empty permission list is vacuously satisfied.
        """
        if not permissions:
            return True
        held = self.user_permission_keys
        return all(required in held for required in permissions)

    def has_any_permissions(self, permissions: List[str]) -> bool:
        """Check if the user has at least one of the specified permissions.

        An empty permission list is vacuously satisfied.
        """
        if not permissions:
            return True
        held = self.user_permission_keys
        return any(required in held for required in permissions)
|
||||
|
||||
|
||||
# Shared FastAPI security scheme: extracts the bearer token from the
# `Authorization` header and rejects requests that lack one.
security = HTTPBearer()
|
||||
|
||||
|
||||
class TokenManager:
    """Creates, decodes, and verifies JWT access/refresh tokens and exposes
    FastAPI dependencies for authentication and permission enforcement.

    Secret, algorithm, and lifetimes come from ``app_settings``.
    """

    def __init__(self):
        self.secret_key = app_settings.JWT_SECRET_KEY
        self.algorithm = app_settings.JWT_ALGORITHM
        self.access_token_expire_minutes = app_settings.ACCESS_TOKEN_EXPIRE_MINUTES
        self.refresh_token_expire_days = app_settings.REFRESH_TOKEN_EXPIRE_DAYS

    def create_access_token(self, subject: Dict[str, str]) -> str:
        """
        Generates an access token with a short expiration time.
        """
        expire = datetime.now(timezone.utc) + timedelta(
            minutes=self.access_token_expire_minutes
        )
        to_encode = {
            "exp": expire,
            "subject": subject,  # User identity information
            "type": "access",  # Indicate token type
        }
        return jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)

    def create_refresh_token(self, subject: Dict[str, str]) -> str:
        """
        Generates a refresh token with a longer expiration time.
        """
        expire = datetime.now(timezone.utc) + timedelta(
            days=self.refresh_token_expire_days
        )
        to_encode = {
            "exp": expire,
            "subject": subject,  # User identity information
            "type": "refresh",  # Indicate token type
            "jti": str(uuid.uuid4()),  # Unique identifier for the refresh token
        }
        return jwt.encode(to_encode, self.secret_key, algorithm=self.algorithm)

    def decode_token(self, token: str) -> Dict:
        """
        Decodes a JWT token and returns the payload.

        Raises:
            ValueError: if the token is malformed, tampered with, or expired.
        """
        try:
            payload = jwt.decode(token, self.secret_key, algorithms=[self.algorithm])
            return payload
        except JWTError:
            raise ValueError("Invalid token")

    def verify_refresh_token(self, token: str) -> bool:
        """
        Verifies a refresh token: valid signature, not expired, and actually
        of type "refresh".

        Bug fix: previously any valid JWT (including an access token) passed
        this check because the "type" claim was never inspected, so an access
        token could be replayed as a refresh token.
        """
        try:
            payload = self.decode_token(token)
        except ValueError:
            return False
        return payload.get("type") == "refresh"

    def refresh_access_token(self, refresh_token: str, subject: Dict[str, str]) -> str:
        """
        Verifies the refresh token and creates a new access token.

        Raises:
            ValueError: if the refresh token is invalid.
        """
        if self.verify_refresh_token(refresh_token):
            return self.create_access_token(subject)
        else:
            raise ValueError("Invalid refresh token")

    async def get_current_user(self, credentials: HTTPAuthorizationCredentials = Depends(security)) -> CurrentUser:
        """
        Returns the current user object for the given credentials.

        Raises:
            HTTPException: 401 when the token is missing, invalid, or expired.
        """
        try:
            payload = self.decode_token(credentials.credentials)
            user = payload.get("subject")
            if not user or "id" not in user:
                raise HTTPException(status_code=401, detail="Invalid authentication token")
            return CurrentUser(user.get("id"), user.get(USER_ROLE_NAMES), user.get(USER_PERMISSIONS))
        except (JWTError, ValueError):
            # Bug fix: decode_token converts JWTError into ValueError, so the
            # original `except JWTError` never fired and a bad token escaped
            # as an unhandled ValueError (HTTP 500) instead of a 401.
            raise HTTPException(status_code=401, detail="Invalid authentication token")

    def has_all_permissions(self, permissions: List[str]):
        """Dependency factory: require the caller to hold every permission."""

        def inner_dependency(current_user: CurrentUser = Depends(self.get_current_user)):
            if not current_user.has_all_permissions(permissions):
                raise HTTPException(status_code=403, detail="Not allowed")
            return True

        return inner_dependency

    def has_any_permissions(self, permissions: List[str]):
        """Dependency factory: require at least one of the permissions."""

        def inner_dependency(current_user: CurrentUser = Depends(self.get_current_user)):
            if not current_user.has_any_permissions(permissions):
                raise HTTPException(status_code=403, detail="Not allowed")
            return True

        return inner_dependency
|
||||
22
common/utils/date.py
Normal file
22
common/utils/date.py
Normal file
@ -0,0 +1,22 @@
|
||||
import datetime
|
||||
from datetime import timedelta, timezone
|
||||
|
||||
|
||||
def get_sunday(date):
    """Return the Sunday ending the ISO week that contains *date*.

    weekday() is Monday == 0 ... Sunday == 6, so the offset forward to
    Sunday is (6 - weekday) days.
    """
    return date + timedelta(days=6 - date.weekday())
|
||||
|
||||
|
||||
def get_last_sunday_dates(number, include_current_week=True):
    """Return the dates of the most recent *number* Sundays, newest first.

    With include_current_week=True the anchor is the Sunday of the current
    (UTC) week, which may lie in the future; otherwise the anchor is the
    most recent past Sunday.

    NOTE(review): with include_current_week=False and "today" a Sunday, the
    anchor is still today — confirm that is intended.
    """
    now_utc = datetime.datetime.now(timezone.utc)
    today = datetime.datetime(now_utc.year, now_utc.month, now_utc.day)
    if include_current_week:
        # Forward offset (0..6 days) to this week's Sunday.
        anchor = today + datetime.timedelta(days=(6 - today.weekday()) % 7)
    else:
        # Backward offset (0..6 days) to the latest Sunday not ahead of today.
        anchor = today - datetime.timedelta(days=(today.weekday() - 6) % 7)
    return [(anchor - datetime.timedelta(weeks=i)).date() for i in range(number)]
|
||||
13
common/utils/region.py
Normal file
13
common/utils/region.py
Normal file
@ -0,0 +1,13 @@
|
||||
from common.constants.region import UserRegion
|
||||
|
||||
|
||||
class RegionHandler:
    """Best-effort detection of a user's preferred region from the request host."""

    def __init__(self):
        # Substrings that mark a host as mainland-China facing.
        # NOTE(review): "host" also matches e.g. "localhost" — confirm that
        # classifying local hosts as ZH_CN is intentional.
        self._zh_cn_patterns = [".cn", "cn.", "host"]

    def detect_from_host(self, host: str) -> UserRegion:
        """Return UserRegion.ZH_CN when any known pattern occurs in *host*
        (case-insensitive); otherwise UserRegion.OTHER."""
        lowered = host.lower()
        if any(pattern in lowered for pattern in self._zh_cn_patterns):
            return UserRegion.ZH_CN
        return UserRegion.OTHER
|
||||
87
common/utils/string.py
Normal file
87
common/utils/string.py
Normal file
@ -0,0 +1,87 @@
|
||||
import random
|
||||
import re
|
||||
import jieba
|
||||
from typing import List
|
||||
|
||||
# Known skill keywords matched against user content by extract_skill_tags().
SKILL_TAGS = [
    "C++",
    "Java",
    "Python",
    "TypeScript",
    "iOS",
    "Android",
    "Web",
    "Javascript",
    "Vue",
    "Go",
]

# dynamically update skill tags? maybe based on the most commonly extracted keywords to help the system adapt to change
def updateSkillTags(string):
    """Append *string* to the global SKILL_TAGS list.

    NOTE(review): appends unconditionally, so duplicates are possible —
    consider de-duplicating if this is ever wired up.
    """
    SKILL_TAGS.append(string)
|
||||
|
||||
|
||||
def generate_auth_code():
    """Return a random 6-digit numeric authentication code as a string.

    Security fix: uses the `secrets` module instead of `random`. Auth codes
    are security-sensitive, and `random` is not cryptographically secure,
    making codes predictable to an attacker who observes enough outputs.
    """
    import secrets  # local import keeps this fix self-contained

    digits = "0123456789"
    return "".join(secrets.choice(digits) for _ in range(6))
|
||||
|
||||
|
||||
# TODO: Need to optimize
|
||||
# TODO: Need to optimize
def generate_self_intro_summary(content_html: str) -> str:
    """Strip HTML tags from *content_html* and return the first 50
    characters of the remaining (whitespace-trimmed) text."""
    plain_text = re.sub("<.*?>", "", content_html).strip()
    return plain_text[:50]
|
||||
|
||||
|
||||
# TODO: Need to optimize
|
||||
# TODO: Need to optimize
def extract_skill_tags(content_html: str) -> List[str]:
    """Return the SKILL_TAGS that occur as standalone words in the
    tag-stripped, jieba-tokenised content (case-insensitive match,
    canonical casing and SKILL_TAGS order preserved in the result)."""
    plain_text = re.sub(re.compile("<.*?>"), "", content_html).strip()
    tokens = {token.lower() for token in jieba.cut(plain_text) if token.strip()}
    return [tag for tag in SKILL_TAGS if tag.lower() in tokens]
|
||||
|
||||
|
||||
# TODO: Need to optimize
def extract_title(content_html: str) -> str:
    """Derive a title from HTML content.

    Tags are replaced with newlines, the text is trimmed, and it is cut at
    the earliest punctuation/line-break character. The whole text is
    returned when no cut point exists.

    Fix: the original annotated the return type as List[str], but a plain
    str is returned; the annotation is corrected (no behavior change).
    """
    element_html = re.compile("<.*?>")
    content_text = re.sub(element_html, "\n", content_html).strip()

    cut_point_indexes = []
    for cut_point in [".", ",", ";", "\r", "\n"]:
        result = content_text.find(cut_point)
        # `> 0` (not >= 0) also skips a cut character at position 0, which
        # would otherwise produce an empty title.
        if result > 0:
            cut_point_indexes.append(result)

    title = (
        content_text[: min(cut_point_indexes)]
        if len(cut_point_indexes) > 0
        else content_text
    )
    return title
|
||||
|
||||
|
||||
def check_password_complexity(password):
    """Return True when *password* contains at least one lowercase letter,
    one uppercase letter, one digit, and one special character."""
    required_patterns = (
        r"[a-z]",
        r"[A-Z]",
        r"\d",
        r'[!@#$%^&*(),.?":{}|<>]',
    )
    return all(re.search(pattern, password) is not None for pattern in required_patterns)
|
||||
14
local.env
Normal file
14
local.env
Normal file
@ -0,0 +1,14 @@
|
||||
APP_NAME=authentication
|
||||
SERVER_HOST=0.0.0.0
|
||||
SERVER_PORT=7900
|
||||
AUTH_SERVICE_ENDPOINT=http://localhost:9000/api/v1/
|
||||
AUTH_SERVICE_PORT=9000
|
||||
CONTAINER_APP_ROOT=/app
|
||||
BACKEND_LOG_FILE_NAME=$APP_NAME
|
||||
APPLICATION_ACTIVITY_LOG=authentication-activity
|
||||
MONGODB_URI=mongodb://localhost:27017/
|
||||
MONGODB_NAME=freeleaps2
|
||||
MONGODB_PORT=27017
|
||||
TENANT_CACHE_MAX=64
|
||||
JWT_SECRET_KEY=ea84edf152976b2fcec12b78aa8e45bc26a5cf0ef61bf16f5c317ae33b3fd8b0
|
||||
|
||||
16
main.py
16
main.py
@ -1,16 +0,0 @@
|
||||
from app.setup_app import create_app
|
||||
from app.utils.config import settings
|
||||
from app.utils.logger import logger
|
||||
|
||||
app = create_app()
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
logger.info(f"Starting server on {settings.UVICORN_HOST}:{settings.UVICORN_PORT}...")
|
||||
uvicorn.run(
|
||||
'main:app',
|
||||
host=settings.UVICORN_HOST,
|
||||
port=settings.UVICORN_PORT,
|
||||
reload=settings.is_development(),
|
||||
)
|
||||
@ -1,4 +1,18 @@
|
||||
fastapi==0.115.12
|
||||
starlette==0.46.2
|
||||
pydantic_settings==2.9.1
|
||||
uvicorn==0.34.2
|
||||
fastapi==0.114.0
|
||||
fastapi-mail==1.4.1
|
||||
fastapi-jwt==0.2.0
|
||||
pika==1.3.2
|
||||
pydantic==2.9.2
|
||||
loguru==0.7.2
|
||||
uvicorn==0.23.2
|
||||
beanie==1.21.0
|
||||
jieba==0.42.1
|
||||
sendgrid
|
||||
aio-pika
|
||||
httpx
|
||||
pydantic-settings
|
||||
python-jose
|
||||
passlib[bcrypt]
|
||||
prometheus-fastapi-instrumentator==7.0.2
|
||||
pytest==8.4.1
|
||||
pytest-asyncio==0.21.2
|
||||
26
start_fastapi.sh
Executable file
26
start_fastapi.sh
Executable file
@ -0,0 +1,26 @@
|
||||
#! /bin/bash
# Bootstrap and run the authentication FastAPI service locally:
# create a virtualenv if needed, install dependencies, load local.env,
# and start uvicorn with auto-reload.

# Resolve the directory containing this script.
# Bug fix: the original line was `rp=$(dirname "$(realpath '$1'))")` —
# broken quoting/parentheses, single quotes preventing expansion, and $1
# (first argument) where the script's own path ($0) was intended.
rp=$(dirname "$(realpath "$0")")
pushd "$rp"

APP_NAME=authentication
VENV_DIR=venv

. .env

if [ -d "$VENV_DIR" ]
then
    echo "Folder $VENV_DIR exists. Proceed to next steps"
else
    echo "Folder $VENV_DIR doesn't exist. create it"
    sudo apt install python3-pip
    python3 -m pip install virtualenv
    python3 -m virtualenv "$VENV_DIR"
fi

source "$VENV_DIR/bin/activate"
pip install --upgrade pip
pip install -r requirements.txt
# Export every variable defined in local.env into the environment.
set -a; source local.env; set +a

uvicorn webapi.main:app --reload --host 0.0.0.0 --port "$SERVER_PORT"
popd
|
||||
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
0
tests/api_tests/__init__.py
Normal file
0
tests/api_tests/__init__.py
Normal file
86
tests/api_tests/permission/README.md
Normal file
86
tests/api_tests/permission/README.md
Normal file
@ -0,0 +1,86 @@
|
||||
# Permission API Test Report
|
||||
|
||||
## How to Run the Tests
|
||||
|
||||
**Run all permission API tests with coverage:**
|
||||
```bash
|
||||
pytest --cov=authentication --cov-report=term-missing tests/api_tests/permission/
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Test Results Summary
|
||||
|
||||
- **Total tests collected:** 26
|
||||
- **All tests passed.**
|
||||
- **Warnings:**
|
||||
- Deprecation warnings from Pydantic/Beanie (upgrade recommended for future compatibility).
|
||||
- Coverage warning: `Module authentication was never imported. (module-not-imported)`
|
||||
|
||||
---
|
||||
|
||||
## Test Case Explanations
|
||||
|
||||
### test_create_permission.py
|
||||
|
||||
- **test_create_permission_success**
|
||||
Admin user can create a permission with valid data.
|
||||
- **test_create_permission_fail_duplicate_key/name**
|
||||
Creating a permission with duplicate key or name fails.
|
||||
- **test_create_permission_fail_empty_key/name**
|
||||
Creating a permission with empty key or name fails.
|
||||
- **test_create_permission_success_empty_description**
|
||||
Description is optional.
|
||||
- **test_create_permission_fail_by_non_admin**
|
||||
Non-admin user cannot create permissions.
|
||||
- **test_create_permission_success_after_grant_admin**
|
||||
After admin grants admin role to a temp user and the user re-logs in, the user can create permissions.
|
||||
|
||||
### test_delete_permission.py
|
||||
|
||||
- **test_delete_permission_success**
|
||||
Admin user can delete a permission.
|
||||
- **test_delete_permission_fail_not_found**
|
||||
Deleting a non-existent permission fails.
|
||||
- **test_delete_default_permission_fail**
|
||||
Default permissions cannot be deleted.
|
||||
- **test_delete_permission_fail_by_non_admin**
|
||||
Non-admin user cannot delete permissions.
|
||||
- **test_delete_permission_success_after_grant_admin**
|
||||
After admin grants admin role to a temp user and the user re-logs in, the user can delete permissions.
|
||||
|
||||
### test_update_permission.py
|
||||
|
||||
- **test_update_permission_success**
|
||||
Admin user can update a permission.
|
||||
- **test_update_permission_fail_not_found**
|
||||
Updating a non-existent permission fails.
|
||||
- **test_update_permission_fail_duplicate_key/name**
|
||||
Updating to a duplicate key or name fails.
|
||||
- **test_update_permission_fail_empty_key/name**
|
||||
Updating with empty key or name fails.
|
||||
- **test_update_default_permission_fail**
|
||||
Default permissions cannot be updated.
|
||||
- **test_update_permission_fail_by_non_admin**
|
||||
Non-admin user cannot update permissions.
|
||||
- **test_update_permission_success_after_grant_admin**
|
||||
After admin grants admin role to a temp user and the user re-logs in, the user can update permissions.
|
||||
|
||||
### test_query_permission.py
|
||||
|
||||
- **test_query_all_permissions**
|
||||
Query all permissions, expect a list.
|
||||
- **test_query_permissions_by_key/name**
|
||||
Query permissions by key or name (fuzzy search).
|
||||
- **test_query_permissions_pagination**
|
||||
Query permissions with pagination.
|
||||
|
||||
---
|
||||
|
||||
## Summary
|
||||
|
||||
- These tests ensure that only admin users can manage permissions, and that permission can be delegated by granting the admin role to other users.
|
||||
- Each test case is designed to verify both positive and negative scenarios, including permission escalation and proper error handling.
|
||||
- **Coverage reporting is not working**: the `--cov=authentication` target is never imported by the tests (hence the `module-not-imported` warning above). Point `--cov` at the package the tests actually import, or fix the import path, to get a complete report.
|
||||
|
||||
---
|
||||
0
tests/api_tests/permission/__init__.py
Normal file
0
tests/api_tests/permission/__init__.py
Normal file
21
tests/api_tests/permission/conftest.py
Normal file
21
tests/api_tests/permission/conftest.py
Normal file
@ -0,0 +1,21 @@
|
||||
import pytest
|
||||
|
||||
from tests.base.authentication_web import AuthenticationWeb
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def authentication_web() -> AuthenticationWeb:
    """Session-scoped API client logged in with the default (admin) credentials."""
    web = AuthenticationWeb()
    web.login()
    return web
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def authentication_web_of_temp_user1() -> AuthenticationWeb:
    """Session-scoped API client logged in as a freshly created temporary user."""
    web = AuthenticationWeb()
    temp_user = web.create_temporary_user()
    web.user_email = temp_user["email"]
    web.password = temp_user["password"]
    web.user_id = temp_user["user_id"]
    web.login()
    return web
|
||||
143
tests/api_tests/permission/test_create_permission.py
Normal file
143
tests/api_tests/permission/test_create_permission.py
Normal file
@ -0,0 +1,143 @@
|
||||
import pytest
|
||||
import random
|
||||
from tests.base.authentication_web import AuthenticationWeb
|
||||
|
||||
|
||||
class TestCreatePermission:
    """API tests for the create-permission endpoint.

    Covers the happy path, validation failures (duplicate or empty
    permission_key / permission_name), authorization failures for
    non-admin users, and permission escalation via granting the admin
    role to a temporary user.
    """

    @staticmethod
    def _unique_suffix() -> str:
        # Random 5-digit suffix keeps permission keys/names unique across runs,
        # since the backend rejects duplicates.
        return str(random.randint(10000, 99999))

    @pytest.mark.asyncio
    async def test_create_permission_success(self, authentication_web: AuthenticationWeb):
        """Test creating a permission successfully with valid and unique permission_key and permission_name."""
        suffix = self._unique_suffix()
        perm_data = {
            "permission_key": f"test_perm_key_success_{suffix}",
            "permission_name": f"Test Permission Success {suffix}",
            "description": "Permission for testing success"
        }
        response = await authentication_web.create_permission(perm_data)
        assert response.status_code == 200
        payload = response.json()
        assert payload["permission_key"] == perm_data["permission_key"]
        assert payload["permission_name"] == perm_data["permission_name"]
        assert payload["description"] == perm_data["description"]
        assert payload["id"] is not None
        assert payload["created_at"] is not None
        assert payload["updated_at"] is not None

    @pytest.mark.asyncio
    async def test_create_permission_fail_duplicate_key(self, authentication_web: AuthenticationWeb):
        """Test creating a permission fails when permission_key is duplicated."""
        suffix = self._unique_suffix()
        perm_data = {
            "permission_key": f"test_perm_key_dup_{suffix}",
            "permission_name": f"Test Permission DupKey {suffix}",
            "description": "desc"
        }
        await authentication_web.create_permission(perm_data)
        # Second payload reuses the key but not the name, so only the key collides.
        perm_data2 = {
            "permission_key": f"test_perm_key_dup_{suffix}",
            "permission_name": f"Test Permission DupKey2 {suffix}",
            "description": "desc2"
        }
        response = await authentication_web.create_permission(perm_data2)
        assert response.status_code in (400, 422)

    @pytest.mark.asyncio
    async def test_create_permission_fail_duplicate_name(self, authentication_web: AuthenticationWeb):
        """Test creating a permission fails when permission_name is duplicated."""
        suffix = self._unique_suffix()
        perm_data = {
            "permission_key": f"test_perm_key_dupname1_{suffix}",
            "permission_name": f"Test Permission DupName {suffix}",
            "description": "desc"
        }
        await authentication_web.create_permission(perm_data)
        # Second payload reuses the name but not the key, so only the name collides.
        perm_data2 = {
            "permission_key": f"test_perm_key_dupname2_{suffix}",
            "permission_name": f"Test Permission DupName {suffix}",
            "description": "desc2"
        }
        response = await authentication_web.create_permission(perm_data2)
        assert response.status_code in (400, 422)

    @pytest.mark.asyncio
    async def test_create_permission_fail_empty_key(self, authentication_web: AuthenticationWeb):
        """Test creating a permission fails when permission_key is empty."""
        suffix = self._unique_suffix()
        perm_data = {
            "permission_key": "",
            "permission_name": f"Test Permission EmptyKey {suffix}",
            "description": "desc"
        }
        response = await authentication_web.create_permission(perm_data)
        assert response.status_code in (400, 422)

    @pytest.mark.asyncio
    async def test_create_permission_fail_empty_name(self, authentication_web: AuthenticationWeb):
        """Test creating a permission fails when permission_name is empty."""
        suffix = self._unique_suffix()
        perm_data = {
            "permission_key": f"test_perm_key_emptyname_{suffix}",
            "permission_name": "",
            "description": "desc"
        }
        response = await authentication_web.create_permission(perm_data)
        assert response.status_code in (400, 422)

    @pytest.mark.asyncio
    async def test_create_permission_success_empty_description(self, authentication_web: AuthenticationWeb):
        """Test creating a permission successfully when description is None (optional field)."""
        suffix = self._unique_suffix()
        perm_data = {
            "permission_key": f"test_perm_key_emptydesc_{suffix}",
            "permission_name": f"Test Permission EmptyDesc {suffix}",
            "description": None
        }
        response = await authentication_web.create_permission(perm_data)
        assert response.status_code == 200
        payload = response.json()
        assert payload["permission_key"] == perm_data["permission_key"]
        assert payload["permission_name"] == perm_data["permission_name"]
        # The backend may normalize a missing description to None or "".
        assert payload["description"] is None or payload["description"] == ""

    @pytest.mark.asyncio
    async def test_create_permission_fail_by_non_admin(self, authentication_web_of_temp_user1: AuthenticationWeb):
        """Test creating a permission fails by non-admin user (no permission)."""
        suffix = self._unique_suffix()
        perm_data = {
            "permission_key": f"test_perm_key_nonadmin_{suffix}",
            "permission_name": f"Test Permission NonAdmin {suffix}",
            "description": "desc"
        }
        response = await authentication_web_of_temp_user1.create_permission(perm_data)
        assert response.status_code in (401, 403)

    @pytest.mark.asyncio
    async def test_create_permission_success_after_grant_admin(self, authentication_web: AuthenticationWeb):
        """Test creating a permission succeeds after granting admin role to a new temporary user and re-login."""
        # Create a new temp user and log in as them.
        user = authentication_web.create_temporary_user()
        temp_authentication_web = AuthenticationWeb(user_email=user["email"], password=user["password"])
        temp_authentication_web.user_id = user["user_id"]
        temp_authentication_web.login()
        # Grant the admin role to the temp user (as the admin client).
        resp = await authentication_web.query_roles({"role_key": "admin"})
        admin_role_id = resp.json()["items"][0]["id"]
        await authentication_web.assign_roles_to_user({
            "user_id": temp_authentication_web.user_id,
            "role_ids": [admin_role_id]
        })
        # Re-login as the temp user so the new role takes effect in the session.
        temp_authentication_web.login()
        # The temp user can now create a permission.
        suffix = self._unique_suffix()
        perm_data = {
            "permission_key": f"test_perm_key_tempadmin_{suffix}",
            "permission_name": f"Test Permission TempAdmin {suffix}",
            "description": "desc"
        }
        response = await temp_authentication_web.create_permission(perm_data)
        assert response.status_code == 200
|
||||
|
||||
|
||||
# Allow running this test module directly (python test_create_permission.py)
# without invoking pytest from the command line.
if __name__ == '__main__':
    pytest.main([__file__])
|
||||
85
tests/api_tests/permission/test_delete_permission.py
Normal file
85
tests/api_tests/permission/test_delete_permission.py
Normal file
@ -0,0 +1,85 @@
|
||||
import pytest
|
||||
import random
|
||||
|
||||
from backend.models.permission.constants import DefaultPermissionEnum
|
||||
from tests.base.authentication_web import AuthenticationWeb
|
||||
|
||||
|
||||
class TestDeletePermission:
    """API tests for the delete-permission endpoint.

    Covers successful deletion, not-found and protected-default
    failures, authorization failures for non-admin users, and
    permission escalation via granting the admin role.
    """

    @staticmethod
    def _unique_suffix() -> str:
        # Random 5-digit suffix keeps permission keys/names unique across runs,
        # since the backend rejects duplicates.
        return str(random.randint(10000, 99999))

    @pytest.mark.asyncio
    async def test_delete_permission_success(self, authentication_web: AuthenticationWeb):
        """Test deleting a permission successfully."""
        suffix = self._unique_suffix()
        perm = await authentication_web.create_permission({
            "permission_key": f"delperm_{suffix}",
            "permission_name": f"delperm_{suffix}",
            "description": "desc"
        })
        perm_id = perm.json()["id"]
        resp = await authentication_web.delete_permission({"permission_id": perm_id})
        assert resp.status_code == 200
        assert resp.json()["success"] is True

    @pytest.mark.asyncio
    async def test_delete_permission_fail_not_found(self, authentication_web: AuthenticationWeb):
        """Test deleting a permission fails when permission_id does not exist."""
        # 24 zero hex chars: a syntactically valid but nonexistent ObjectId.
        resp = await authentication_web.delete_permission({"permission_id": "000000000000000000000000"})
        assert resp.status_code in (400, 422)

    @pytest.mark.asyncio
    async def test_delete_default_permission_fail(self, authentication_web: AuthenticationWeb):
        """Test deleting a default permission fails. Default permission cannot be deleted."""
        # Query a default permission by its well-known key.
        resp = await authentication_web.query_permissions(
            params={"page": 1, "page_size": 2, "permission_key": DefaultPermissionEnum.CHANGE_PERMISSIONS.value.permission_key})
        payload = resp.json()
        default_permission_id = payload["items"][0]["id"]
        resp = await authentication_web.delete_permission(perm_data={"permission_id": default_permission_id})
        assert resp.status_code in (400, 422)

    @pytest.mark.asyncio
    async def test_delete_permission_fail_by_non_admin(self, authentication_web: AuthenticationWeb, authentication_web_of_temp_user1: AuthenticationWeb):
        """Test deleting a permission fails by non-admin user (no permission)."""
        # Create a permission as admin so there is something to (fail to) delete.
        suffix = self._unique_suffix()
        perm = await authentication_web.create_permission({
            "permission_key": f"delperm_nonadmin_{suffix}",
            "permission_name": f"delperm_nonadmin_{suffix}",
            "description": "desc"
        })
        perm_id = perm.json()["id"]
        # Deletion attempt by the non-admin temp user must be rejected.
        resp = await authentication_web_of_temp_user1.delete_permission({"permission_id": perm_id})
        assert resp.status_code in (401, 403)

    @pytest.mark.asyncio
    async def test_delete_permission_success_after_grant_admin(self, authentication_web: AuthenticationWeb):
        """Test deleting a permission succeeds after granting admin role to a new temporary user and re-login."""
        # Create a new temp user and log in as them.
        user = authentication_web.create_temporary_user()
        temp_authentication_web = AuthenticationWeb(user_email=user["email"], password=user["password"])
        temp_authentication_web.user_id = user["user_id"]
        temp_authentication_web.login()
        # Create a permission as admin for the temp user to delete later.
        suffix = self._unique_suffix()
        perm = await authentication_web.create_permission({
            "permission_key": f"delperm_tempadmin_{suffix}",
            "permission_name": f"delperm_tempadmin_{suffix}",
            "description": "desc"
        })
        perm_id = perm.json()["id"]
        # Grant the admin role to the temp user (as the admin client).
        resp = await authentication_web.query_roles({"role_key": "admin"})
        admin_role_id = resp.json()["items"][0]["id"]
        await authentication_web.assign_roles_to_user({
            "user_id": temp_authentication_web.user_id,
            "role_ids": [admin_role_id]
        })
        # Re-login as the temp user so the new role takes effect in the session.
        temp_authentication_web.login()
        # The temp user can now delete the permission.
        resp = await temp_authentication_web.delete_permission({"permission_id": perm_id})
        assert resp.status_code == 200
|
||||
|
||||
# Allow running this test module directly (python test_delete_permission.py)
# without invoking pytest from the command line.
if __name__ == '__main__':
    pytest.main([__file__])
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user