Compare commits
No commits in common. "397e5b3307247ad8a47c33b8d8e6e7f9133ff1d0" and "c0ab1f15912475c66037d609932fc7dc5af8737d" have entirely different histories.
397e5b3307 ... c0ab1f1591
@@ -1,7 +1,6 @@
 import streamlit as st
 from config import ENV
 from utils.translations import _
-from utils.persistance import get_session_id


 def afficher_entete():
@@ -12,7 +11,7 @@ def afficher_entete():
     """

     if ENV == "dev":
-        header += f"<p>🔧 {_("app.dev_mode")} Session : {get_session_id()}</p>"
+        header += f"<p>🔧 {_("app.dev_mode")}</p>"
     else:
         header += f"<p>{_("header.subtitle")}</p>"

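Only the changed lines of afficher_entete() are shown above. As a rough, self-contained sketch of the pattern those lines belong to (an HTML header string accumulated and rendered with st.markdown; everything outside the diff, including the surrounding markup and the _() translation strings, is assumed here, not taken from the repository):

import streamlit as st

def afficher_entete_sketch(env: str, session_id: str) -> None:
    # Accumulate the header as an HTML string, then render it in one call.
    # Markup and wording are placeholders, not the repository's template.
    header = "<header>"
    if env == "dev":
        # The left-hand revision also appended the session id in dev mode.
        header += f"<p>🔧 mode dev – session {session_id}</p>"
    else:
        header += "<p>sous-titre</p>"
    header += "</header>"
    st.markdown(header, unsafe_allow_html=True)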
fabnum.py (11 changed lines)
@@ -1,8 +1,5 @@
-import utils.persistance
-utils.persistance.update_session_paths()
-
 import streamlit as st
-from utils.persistance import get_champ_statut, get_session_id
+from utils.persistance import get_champ_statut

 st.set_page_config(
     page_title="Fabnum – Analyse de chaîne",
@@ -91,6 +88,8 @@ init_translations()
 # Pour tester d'autres langues, décommenter cette ligne :
 set_language("fr")

+session_id = st.context.headers.get("x-session-id")
+
 #
 # Important
 # Avec Selinux, il faut mettre les bons droits :
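The added line reads the visitor's identifier from the x-session-id request header, presumably injected by the reverse proxy in front of the app. utils/persistance.py (further down in this compare) wraps the same lookup with an "anonymous" fallback and st.session_state caching; a self-contained sketch of that combined pattern:

import streamlit as st

def lire_session_id() -> str:
    # Read the header once per session and cache it, so reruns of the script
    # keep returning the same value.
    if "session_id" not in st.session_state:
        st.session_state["session_id"] = st.context.headers.get("x-session-id", "anonymous")
    return st.session_state["session_id"]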
@@ -98,7 +97,6 @@ set_language("fr")
 # sudo semanage fcontext -a -t var_log_t '/var/log/nginx/fabnum-public\.access\.log'
 # sudo restorecon -v /var/log/nginx/fabnum-public.access.log
 #
-session_id = get_session_id()
 def get_total_bytes_for_session(session_id):
     total_bytes = 0
     try:
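The body of get_total_bytes_for_session is cut off in this view. Purely as an illustration of the idea suggested by the surrounding SELinux comments (summing the bytes served to one session from the nginx access log), a sketch could look like the following; the log path comes from the comments above, but the line format and the way the session id appears in each line are assumptions:

from pathlib import Path

# Path taken from the SELinux comments above; the log format is assumed.
ACCESS_LOG = Path("/var/log/nginx/fabnum-public.access.log")

def get_total_bytes_for_session_sketch(session_id: str) -> int:
    total_bytes = 0
    try:
        with ACCESS_LOG.open(encoding="utf-8", errors="replace") as log:
            for line in log:
                if session_id not in line:
                    continue
                # Assume the last whitespace-separated field is the response size.
                last_field = line.rsplit(maxsplit=1)[-1]
                if last_field.isdigit():
                    total_bytes += int(last_field)
    except OSError:
        # Unreadable log (missing file, permissions, SELinux label): report 0.
        pass
    return total_bytes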
@@ -160,7 +158,7 @@ def fermer_page():
     st.markdown("""</section>""", unsafe_allow_html=True)
     st.markdown("</main>", unsafe_allow_html=True)

-    total_bytes = get_total_bytes_for_session(get_session_id())
+    total_bytes = get_total_bytes_for_session(session_id)

     afficher_pied_de_page()
     afficher_impact(total_bytes)
@@ -178,6 +176,7 @@ ia_nalyse_tab = _("navigation.ia_nalyse")
 plan_d_action_tab = _("navigation.plan_d_action")
 visualisations_tab = _("navigation.visualisations")

+from utils.persistance import get_champ_statut
 navigation_onglet = get_champ_statut("navigation_onglet")

 if navigation_onglet == instructions_tab:
@@ -3,8 +3,10 @@ services:
   #---- Private-GPT services ---------
   #-----------------------------------

+  # Private-GPT service for the Ollama CPU and GPU modes
+  # This service builds from an external Dockerfile and runs the Ollama mode.
   private-gpt-ollama:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-ollama
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-ollama # x-release-please-version
     user: root
     build:
       context: .
@@ -27,10 +29,12 @@ services:
       - ollama-api
     depends_on:
       ollama:
-        condition: service_started
+        condition: service_healthy

+  # Private-GPT service for the local mode
+  # This service builds from a local Dockerfile and runs the application in local mode.
   private-gpt-llamacpp-cpu:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-llamacpp-cpu
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-llamacpp-cpu # x-release-please-version
     user: root
     build:
       context: .
@@ -52,8 +56,22 @@ services:
   #---- Ollama services --------------
   #-----------------------------------

+  # Traefik reverse proxy for the Ollama service
+  # This will route requests to the Ollama service based on the profile.
   ollama:
     image: traefik:v2.10
+    healthcheck:
+      test:
+        [
+          "CMD",
+          "sh",
+          "-c",
+          "wget -q --spider http://ollama:11434 || exit 1",
+        ]
+      interval: 10s
+      retries: 3
+      start_period: 5s
+      timeout: 5s
     ports:
       - "127.0.0.1:8080:8080"
     command:
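The healthcheck added here only verifies that something answers on http://ollama:11434, which is what lets the private-gpt services wait on condition: service_healthy in the depends_on change earlier in this file. The same probe written as a short Python check (the in-network URL matches the compose healthcheck; from the host you would target the published 127.0.0.1 port instead):

import urllib.request
import urllib.error

def ollama_repond(url: str = "http://ollama:11434", timeout: float = 5.0) -> bool:
    # Equivalent of `wget -q --spider`: send the request and only look at the status code.
    try:
        with urllib.request.urlopen(url, timeout=timeout) as reponse:
            return 200 <= reponse.status < 400
    except (urllib.error.URLError, OSError):
        return False

if __name__ == "__main__":
    # From the host, the ollama-cpu service publishes the API on 127.0.0.1:11434.
    print("ollama reachable:", ollama_repond("http://127.0.0.1:11434"))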
@@ -68,26 +86,24 @@ services:
       - ./.docker/router.yml:/etc/router.yml:ro
     extra_hosts:
       - "host.docker.internal:host-gateway"
-    security_opt:
-      - label:disable
     profiles:
       - ""
       - ollama-cpu
       - ollama-cuda
       - ollama-api

+  # Ollama service for the CPU mode
   ollama-cpu:
     image: ollama/ollama:latest
     ports:
       - "127.0.0.1:11434:11434"
     volumes:
       - ./models:/root/.ollama:Z
-    healthcheck:
-      disable: true
     profiles:
       - ""
       - ollama-cpu

+  # Ollama service for the CUDA mode
   ollama-cuda:
     image: ollama/ollama:latest
     ports:
@@ -8,17 +8,22 @@ from pathlib import Path
 load_dotenv(".env")

-def get_session_id() -> str:
+def initialise():
+    SAVE_SESSIONS_PATH.mkdir(parents=True, exist_ok=True)
+
+def get_session_id():
     if "session_id" not in st.session_state:
         session_id = st.context.headers.get("x-session-id", "anonymous")
         st.session_state["session_id"] = session_id
     else:
         session_id = st.session_state["session_id"]
     return session_id

 def update_session_paths():
     global SAVE_STATUT, SAVE_SESSIONS_PATH, SAVE_STATUT_PATH

     SAVE_STATUT = os.getenv("SAVE_STATUT", "statut_general.json")
-    SAVE_SESSIONS_PATH = Path(f"tmp/sessions/{get_session_id()}")
-    SAVE_SESSIONS_PATH.mkdir(parents=True, exist_ok=True)
+    session_id = get_session_id()
+    SAVE_SESSIONS_PATH = Path(f"tmp/sessions/{session_id}")
     SAVE_STATUT_PATH = SAVE_SESSIONS_PATH / SAVE_STATUT
+    initialise()

 def _maj_champ(fichier, cle: str, contenu: str = "") -> bool:
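get_champ_statut and _maj_champ themselves are not part of this diff; they operate on the per-session JSON status file whose path (SAVE_STATUT_PATH) is assembled above. A hypothetical reader for such a file, only to show how a field like navigation_onglet in fabnum.py could be looked up; the JSON layout is an assumption:

import json
from pathlib import Path

def lire_champ_statut(statut_path: Path, cle: str, defaut: str = "") -> str:
    # Hypothetical helper: load the per-session status JSON and return one field.
    try:
        donnees = json.loads(statut_path.read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError):
        return defaut
    return donnees.get(cle, defaut)

# Example, following the tmp/sessions/<session_id>/statut_general.json layout
# built by update_session_paths():
# onglet = lire_champ_statut(Path("tmp/sessions/anonymous/statut_general.json"), "navigation_onglet")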