Commit 35ea8043 authored by Mattia Bergagio's avatar Mattia Bergagio
Browse files

Initial commit

parents
# AI Framework
Workflow: ![workflow](_pics/workflow.jpg)
Entity and context understanding: ![entities](_pics/entities.jpg)
Tested on NVIDIA GeForce RTX 3080.
## Models
| Module | Library | GPU |
| :--- | :--- | :--- |
| osd_tvs | ffmpeg | No |
| mmc_aus | [pyannote.audio](https://github.com/pyannote/pyannote-audio) | Yes |
| osd_vcd | [PySceneDetect](https://github.com/Breakthrough/PySceneDetect) | No |
| mmc_asr | [whisper-timestamped](https://github.com/linto-ai/whisper-timestamped) | Yes |
| mmc_sir | [SpeechBrain](https://github.com/speechbrain/speechbrain) | Yes |
| paf_fir | [DeepFace](https://github.com/serengil/deepface) | Yes |
## Docker commands
Run
```
docker compose --env-file .env.template -f compose.yml up
```
## GPU drivers
NVIDIA driver: ![package](_pics/additional_drivers.png)
## Troubleshooting
If you get the following error message
```
docker: Error response from daemon: could not select device driver "nvidia" with capabilities: [[gpu]]
```
run
```
sudo apt-get install -y nvidia-docker2
sudo systemctl restart docker
```
# NOTE(review): the `version` key is obsolete for Compose V2 (`docker compose`
# treats every file as the Compose Specification); kept only for legacy
# docker-compose 1.x compatibility.
version: "2"

services:
  rabbitmq_dc:
    image: rabbitmq:3.9.5-management-alpine
    healthcheck:
      test: rabbitmq-diagnostics -q ping
      interval: 30s
      timeout: 30s
      retries: 3
    restart: always
    environment:
      - RABBITMQ_DEFAULT_USER=${MIDDLEWARE_USER}
      - RABBITMQ_DEFAULT_PASS=${MIDDLEWARE_PASSWORD}
    ports:
      # Quoted: unquoted "a:b" port mappings are parsed as base-60 integers
      # by YAML 1.1 parsers when both sides are numeric.
      - "${MIDDLEWARE_EXTERNAL_PORT}:15672"
      - "${MIDDLEWARE_PORT}:5672"
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq/mnesia/
      - rabbitmq_logs:/var/log/rabbitmq/
    networks:
      - aifw_net

  osd_ava:
    image: osd_ava:${TAG}
    restart: always
    build:
      # SECURITY NOTE(review): embedding ${GIT_TOKEN} in the context URL can
      # leak the token into build logs; prefer BuildKit secrets if possible.
      context: https://${GIT_NAME}:${GIT_TOKEN}@gitlab.eurixgroup.com/mpai/osd_ava.git
      dockerfile: Dockerfile
      # Anchored fragments below are plain YAML anchors — reused by the other
      # services to avoid the nine near-identical copies of the original file.
      args: &git_args
        - GIT_NAME=${GIT_NAME}
        - GIT_TOKEN=${GIT_TOKEN}
    environment: &solver_env
      - LOG_LEVEL=${LOG_LEVEL}
      - AI_FW_DIR=${AI_FW_DIR}
      - MIDDLEWARE_USER=${MIDDLEWARE_USER}
      - MIDDLEWARE_PASSWORD=${MIDDLEWARE_PASSWORD}
      - MIDDLEWARE_VIRTUALHOST=${MIDDLEWARE_VIRTUALHOST}
      - MIDDLEWARE_PORT=${MIDDLEWARE_PORT}
      - GIT_NAME=${GIT_NAME}
      - GIT_TOKEN=${GIT_TOKEN}
    volumes:
      - type: bind
        source: ${PATH_SHARED}
        target: ${AI_FW_DIR}
      - type: volume
        source: osd_ava_logs
        target: /LOGS
    networks:
      - aifw_net
    depends_on: &wait_for_broker
      rabbitmq_dc:
        condition: service_healthy

  osd_ave:
    image: osd_ave:${TAG}
    restart: always
    build:
      context: https://${GIT_NAME}:${GIT_TOKEN}@gitlab.eurixgroup.com/mpai/osd_ave.git
      dockerfile: Dockerfile
      args: *git_args
    environment: *solver_env
    volumes:
      - type: bind
        source: ${PATH_SHARED}
        target: ${AI_FW_DIR}
      - type: volume
        source: osd_ave_logs
        target: /LOGS
    networks:
      - aifw_net
    depends_on: *wait_for_broker

  osd_avs:
    image: osd_avs:${TAG}
    restart: always
    build:
      context: https://${GIT_NAME}:${GIT_TOKEN}@gitlab.eurixgroup.com/mpai/osd_avs.git
      dockerfile: Dockerfile
      args: *git_args
    environment: *solver_env
    volumes:
      - type: bind
        source: ${PATH_SHARED}
        target: ${AI_FW_DIR}
      - type: volume
        source: osd_avs_logs
        target: /LOGS
    networks:
      - aifw_net
    depends_on: *wait_for_broker

  mmc_asr:
    image: mmc_asr:${TAG}
    restart: always
    build:
      context: https://${GIT_NAME}:${GIT_TOKEN}@gitlab.eurixgroup.com/mpai/mmc_asr.git
      dockerfile: Dockerfile
      args: *git_args
    environment: *solver_env
    # GPU reservation, shared by every CUDA-backed solver below.
    deploy: &gpu_deploy
      resources:
        reservations:
          devices:
            - driver: nvidia
              device_ids: ["0"]
              capabilities: [gpu]
    volumes:
      - type: bind
        source: ${PATH_SHARED}
        target: ${AI_FW_DIR}
      - type: volume
        source: mmc_asr_logs
        target: /LOGS
    networks:
      - aifw_net
    depends_on: *wait_for_broker

  osd_tvs:
    image: osd_tvs:${TAG}
    restart: always
    build:
      context: https://${GIT_NAME}:${GIT_TOKEN}@gitlab.eurixgroup.com/mpai/osd_tvs.git
      dockerfile: Dockerfile
      args: *git_args
    environment: *solver_env
    volumes:
      - type: bind
        source: ${PATH_SHARED}
        target: ${AI_FW_DIR}
      - type: volume
        source: osd_tvs_logs
        target: /LOGS
    networks:
      - aifw_net
    depends_on: *wait_for_broker

  controller:
    image: controller:${TAG}
    restart: always
    build:
      context: ./
      dockerfile: controller/Dockerfile
      args: *git_args
    environment:
      # Controller-specific: no AI_FW_DIR, but TAG is needed to start solvers.
      - LOG_LEVEL=${LOG_LEVEL}
      - MIDDLEWARE_USER=${MIDDLEWARE_USER}
      - MIDDLEWARE_PASSWORD=${MIDDLEWARE_PASSWORD}
      - MIDDLEWARE_VIRTUALHOST=${MIDDLEWARE_VIRTUALHOST}
      - MIDDLEWARE_PORT=${MIDDLEWARE_PORT}
      - GIT_NAME=${GIT_NAME}
      - GIT_TOKEN=${GIT_TOKEN}
      - TAG=${TAG}
    volumes:
      # access my host's Docker service from inside container
      # https://stackoverflow.com/a/71543340
      - type: bind
        source: /var/run/docker.sock
        target: /var/run/docker.sock
      - type: bind
        source: ${PATH_SHARED}
        target: ${AI_FW_DIR}
      - type: volume
        source: controller_logs
        target: /LOGS
    networks:
      - aifw_net
    depends_on: *wait_for_broker

  mmc_aus:
    image: mmc_aus:${TAG}
    restart: always
    build:
      context: https://${GIT_NAME}:${GIT_TOKEN}@gitlab.eurixgroup.com/mpai/mmc_aus.git
      dockerfile: Dockerfile
      args: *git_args
    environment:
      # Same as the common solver env plus the pyannote HuggingFace token
      # (sequence anchors cannot be extended, hence the explicit list).
      - LOG_LEVEL=${LOG_LEVEL}
      - AI_FW_DIR=${AI_FW_DIR}
      - MIDDLEWARE_USER=${MIDDLEWARE_USER}
      - MIDDLEWARE_PASSWORD=${MIDDLEWARE_PASSWORD}
      - MIDDLEWARE_VIRTUALHOST=${MIDDLEWARE_VIRTUALHOST}
      - MIDDLEWARE_PORT=${MIDDLEWARE_PORT}
      - HUGGINGFACE_TOKEN=${HUGGINGFACE_TOKEN}
      - GIT_NAME=${GIT_NAME}
      - GIT_TOKEN=${GIT_TOKEN}
    deploy: *gpu_deploy
    volumes:
      - type: bind
        source: ${PATH_SHARED}
        target: ${AI_FW_DIR}
      - type: volume
        source: mmc_aus_logs
        target: /LOGS
    networks:
      - aifw_net
    depends_on: *wait_for_broker

  paf_fir:
    image: paf_fir:${TAG}
    restart: always
    build:
      context: https://${GIT_NAME}:${GIT_TOKEN}@gitlab.eurixgroup.com/mpai/paf_fir.git
      dockerfile: Dockerfile
      args: *git_args
    environment: *solver_env
    deploy: *gpu_deploy
    volumes:
      - type: bind
        source: ${PATH_SHARED}
        target: ${AI_FW_DIR}
      - type: volume
        source: paf_fir_logs
        target: /LOGS
    networks:
      - aifw_net
    depends_on: *wait_for_broker

  osd_vcd:
    image: osd_vcd:${TAG}
    restart: always
    build:
      context: https://${GIT_NAME}:${GIT_TOKEN}@gitlab.eurixgroup.com/mpai/osd_vcd.git
      dockerfile: Dockerfile
      args: *git_args
    environment: *solver_env
    volumes:
      - type: bind
        source: ${PATH_SHARED}
        target: ${AI_FW_DIR}
      - type: volume
        # NOTE(review): only service whose log volume is not prefixed with the
        # service name ("vcd_logs", not "osd_vcd_logs") — kept for
        # compatibility with existing volumes; consider renaming.
        source: vcd_logs
        target: /LOGS
    networks:
      - aifw_net
    depends_on: *wait_for_broker

  mmc_sir:
    image: mmc_sir:${TAG}
    restart: always
    build:
      context: https://${GIT_NAME}:${GIT_TOKEN}@gitlab.eurixgroup.com/mpai/mmc_sir.git
      dockerfile: Dockerfile
      args: *git_args
    environment: *solver_env
    deploy: *gpu_deploy
    volumes:
      - type: bind
        source: ${PATH_SHARED}
        target: ${AI_FW_DIR}
      - type: volume
        source: mmc_sir_logs
        target: /LOGS
    networks:
      - aifw_net
    depends_on: *wait_for_broker

volumes:
  rabbitmq_data:
    driver: local
  rabbitmq_logs:
    driver: local
  controller_logs:
    driver: local
  osd_ava_logs:
    driver: local
  osd_ave_logs:
    driver: local
  osd_avs_logs:
    driver: local
  mmc_asr_logs:
    driver: local
  osd_tvs_logs:
    driver: local
  mmc_aus_logs:
    driver: local
  paf_fir_logs:
    driver: local
  vcd_logs:
    driver: local
  mmc_sir_logs:
    driver: local

networks:
  aifw_net:
    driver: bridge
\ No newline at end of file
FROM python:3.9.16-slim-bullseye

# Timezone and filesystem layout for the controller image.
ENV TZ='Europe/Rome'
ENV BASE_FOLDER='/CTRLR'
ENV LOGS_FOLDER='/LOGS'
ENV APP_USER='devuser'

# SECURITY NOTE(review): build args end up in the image metadata
# (`docker history`); prefer BuildKit secret mounts for GIT_TOKEN if possible.
ARG GIT_NAME
ARG GIT_TOKEN

RUN \
    apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
    tzdata \
    # TODO Other packets common to all images go in here
    curl \
    iputils-ping \
    git \
    && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/* && \
    ln -snf "/usr/share/zoneinfo/$TZ" '/etc/localtime' && \
    echo "$TZ" > '/etc/timezone'

# Unprivileged runtime user owning the app and log directories.
RUN useradd -m "$APP_USER" && \
    mkdir -p "$BASE_FOLDER" "$LOGS_FOLDER" && \
    chown -R "${APP_USER}:${APP_USER}" "$BASE_FOLDER" "$LOGS_FOLDER"

USER "$APP_USER"
WORKDIR "$BASE_FOLDER"

# Only the non-root user's script dir is needed: nothing runs as root after
# the USER switch above, so /root/.local/bin was dead weight.
ENV PATH="${PATH}:/home/${APP_USER}/.local/bin"
ENV PYTHONPATH="${PYTHONPATH}:${BASE_FOLDER}"

# Install deps before copying sources so the pip layer caches across edits.
COPY --chown="${APP_USER}:${APP_USER}" /controller/requirements.txt ./requirements.txt
RUN \
    python3 -m pip install --upgrade pip && \
    python3 -m pip install --no-cache-dir -r requirements.txt

# Shared project code is cloned next to the controller sources.
WORKDIR "$BASE_FOLDER"/src
RUN git clone https://${GIT_NAME}:${GIT_TOKEN}@gitlab.eurixgroup.com/mpai/common_module.git

# Back to the app root — absolute path instead of the fragile `WORKDIR ..`;
# debug-only `RUN pwd` / `RUN ls` layers removed.
WORKDIR "$BASE_FOLDER"
COPY --chown="${APP_USER}:${APP_USER}" /controller/src ./src

CMD ["python", "src/main.py"]
docker  # TODO: pin a version for reproducible builds (all other deps are pinned)
pika==1.3.1
typeguard==4.1.5
typing_extensions==4.8.0
\ No newline at end of file
from run_funs import run
try:
from common_utils import adapter
except ModuleNotFoundError:
from common_module.common_utils import adapter
try:
from common_utils import rabbitmq
except ModuleNotFoundError:
from common_module.common_utils import rabbitmq
if __name__ == "__main__":
    # Wire the controller callback to its RabbitMQ queue, then block,
    # consuming messages until the adapter stops.
    mq_worker = rabbitmq.Worker()
    mq_worker.register_callback(queue="queue_module_controller", callback=run)
    adapter.Adapter(mq_worker).start_listening()
import os
from typing import Optional

import docker
from typeguard import typechecked
try:
from common_utils import msg_builder, rabbitmq
except ModuleNotFoundError:
from common_module.common_utils import msg_builder, rabbitmq
try:
from common_utils.logger import create_logger
except ModuleNotFoundError:
from common_module.common_utils.logger import create_logger
# Module-level singletons shared by all functions below.
# Docker client built from the environment (DOCKER_HOST etc.); inside the
# controller container this talks to the host daemon via the bind-mounted
# /var/run/docker.sock.
docker_client = docker.from_env()
LOGGER = create_logger(__name__)
@typechecked
def run_container(module: str, envs: Optional[dict] = None) -> str:
    """
    Starts the solver container for ``module`` and returns its container id.

    :param module: solver name; the image run is ``f"{module}:{TAG}"`` where
        TAG comes from the environment.
    :param envs: environment variables for the container (name -> value), or
        None for no extra environment. NOTE: added as a parameter — the
        original body referenced an undefined global ``envs`` and raised
        NameError at call time.
    :return: the container id. Docker ids are hex *strings*; the original
        ``-> int`` annotation made ``@typechecked`` reject every real return.
    """
    # read vols
    # key: path on my machine/host
    # value of key "bind": path inside container
    # "ro": read only
    # "rw": read & write
    # https://stackoverflow.com/a/74524696
    # Docker prepends dir name to named vols to prevent clashes w existing containers!
    # https://forums.docker.com/t/docker-compose-prepends-directory-name-to-named-volumes/32835/2
    # TODO change prepended str if dir is renamed
    vols = {
        "ai-framework_namedin": {"bind": "/in", "mode": "rw"},
        "ai-framework_namedout": {"bind": "/out", "mode": "rw"},
    }
    # network: name of the network the container joins at creation time
    # (Compose prepends the project/dir name to it, same as for volumes).
    solver_container = docker_client.containers.run(
        image=f"{module}:{os.environ['TAG']}",
        detach=True,
        auto_remove=False,
        environment=envs,
        volumes=vols,
        # DeviceRequest.capabilities is a list of capability *lists*, so the
        # nested [['gpu']] is the correct docker-py form.
        device_requests=[
            docker.types.DeviceRequest(device_ids=["0"],
                                       capabilities=[["gpu"]])],
        network="ai-framework_aifw_net",
    )
    return solver_container.id
@typechecked
def kill_container(solver_container_id: str):
    """
    Kills the running container whose id matches ``solver_container_id``.

    :param solver_container_id: Docker container id — a hex *string*, as
        returned by ``run_container``. The original ``int`` annotation made
        ``@typechecked`` reject every real id.
    """
    # Listing with an id filter yields at most one running container; the
    # loop also makes a no-match a silent no-op, as before.
    running_containers = docker_client.containers.list(
        filters={"id": solver_container_id}
    )
    for container in running_containers:
        container.kill()
@typechecked
def run(message_body: dict, worker: rabbitmq.Worker) -> bool:
    """
    Controller callback: routes a message to the next solver queue.

    The payload produced by the controller's ``msg_builder.build_msg`` lives
    under ``message_body["programme"]`` — presumably ``validate_message``
    checks the listed keys inside it (TODO confirm against msg_builder).

    :param message_body: incoming message; routing reads "job_status",
        "process_status" and "module" from the "programme" sub-dict.
    :param worker: RabbitMQ worker used to publish the follow-up message.
    :return: always False (message is never requeued).
    """
    if not msg_builder.validate_message(
        message_body,
        ["external_id", "application", "uid", "job_status", "process_status"],
    ):
        LOGGER.error(f"bad msg: {message_body=}")
    else:
        programme = message_body["programme"]
        # default vals
        uid = programme["uid"]
        force = False
        queue_name = ""
        # "force" may arrive as a real bool or as the string "true"/"True".
        if "force" in programme:
            force_v = programme["force"]
            LOGGER.debug(f"{type(force_v)=}")
            if force_v is not None and (
                (isinstance(force_v, bool) and force_v)
                or (isinstance(force_v, str) and force_v.lower() == "true")
            ):
                force = True
        LOGGER.debug(f"{force=}")
        if programme["job_status"] == "start":
            # A fresh job always enters the pipeline at osd_tvs.
            queue_name = "queue_module_osd_tvs"
        else:
            # job_status == "working": route by the module that just reported.
            if programme["process_status"] == "completed":
                # Pipeline order: osd_tvs -> mmc_aus -> osd_vcd -> mmc_sir ->
                # mmc_asr -> paf_fir -> osd_ava -> osd_avs -> osd_ave.
                # An unknown module leaves queue_name empty, as the original
                # elif-chain did.
                next_queue = {
                    "osd_tvs": "queue_module_mmc_aus",
                    "mmc_aus": "queue_module_osd_vcd",
                    "osd_vcd": "queue_module_mmc_sir",
                    "mmc_sir": "queue_module_mmc_asr",
                    "mmc_asr": "queue_module_paf_fir",
                    "paf_fir": "queue_module_osd_ava",
                    "osd_ava": "queue_module_osd_avs",
                    "osd_avs": "queue_module_osd_ave",
                }
                queue_name = next_queue.get(programme["module"], "")
                LOGGER.debug(
                    "module "
                    f"{programme['module']} "
                    f"{programme['process_status']} "
                    "response"
                )
        LOGGER.debug(f"{queue_name=}")
        if queue_name != "":
            # send data to analyzer
            s_job_status = programme.get("job_status", "--")
            s_process_status = programme.get("process_status", "--")
            LOGGER.debug(
                f"[TRACE][{uid}][SEND] queue: {queue_name}"
                f" -- job_status: {s_job_status}"
                f" -- process_status: {s_process_status}"
            )
            # Forward the normalised boolean, whatever form came in.
            programme["force"] = force
            worker.send_messages(queue=queue_name, messages=(message_body,))
        else:
            # Nothing to route: only "working"/"failed" statuses are expected
            # here; anything else is logged as malformed.
            if "process_status" in programme:
                if not (
                    "working" in programme["process_status"]
                    or "failed" in programme["process_status"]
                ):
                    # Logger.warn is a deprecated alias — use warning().
                    LOGGER.warning(
                        f"bad msg: {programme['process_status']=}"
                    )
    return False
# Example 1
Make dir ```${PATH_SHARED}/vids/2024-03-11_11h05m```
Download vid [Conan_on_TBS.mp4](https://www.dropbox.com/scl/fi/qwbtgwzi7gj15o20vzuzu/Conan_on_TBS.mp4?rlkey=n825e1gn8tvl24l31su4em7oo&dl=0) to that folder
Call vid ```Conan_on_TBS.mp4```
JSON: ```rabbit_in_1.json```
# Example 2
Make dir ```${PATH_SHARED}/vids/2024-03-12_14h32m```
Download vid [Jennifer_Lawrence_Shares_Her_Most_Embarrassing_Moments.mp4](https://www.dropbox.com/scl/fi/yp3e5lc6ru027pjnbyw6r/Jennifer_Lawrence_Shares_Her_Most_Embarrassing_Moments.mp4?rlkey=e6bnznkblr3pcas5jejeu0f53&dl=0) to that folder
Call vid ```Jennifer_Lawrence_Shares_Her_Most_Embarrassing_Moments.mp4```
JSON: ```rabbit_in_2.json```
# All examples
Download [Voxceleb1, test set](https://cn01.mmai.io/download/voxceleb?key=7dcdc3175d007a80cf5f8af9326b8122019525f88115b53fdbfe47d12654e2e1666ebb8f53c72ea92d8d9e7038de79564d7df711eff192b44b4cbc6e1297951b4c6cc41baaf8e67e51f50867c5e7200fea9b80a3aa7c91221681d62679b1bdb5a3ffe76f9650451e2331300e22e35d9102560aca2ff18ba3f073bb7a7f5a45a3&file=vox1_test_wav.zip)
Unzip it to ```${PATH_SHARED}/datasets/voxceleb1/test/vox1_test_wav```
Download [extras](https://www.dropbox.com/scl/fo/gjfy5ao6kl3gquvym6ikf/AIS5Zww23qw5oBKtt20ygQU?rlkey=mjpklo6l41azxo7ph4hzppwm1&dl=0) to that folder
The resulting directory layout should look like this:
```sh
ls ${PATH_SHARED}/datasets/voxceleb1/test/vox1_test_wav/
bill_burr/ id10273/ id10278/ id10283/ id10288/ id10293/ id10298/ id10303/ id10308/
conan_o'_brien/ id10274/ id10279/ id10284/ id10289/ id10294/ id10299/ id10304/ id10309/
id10270/ id10275/ id10280/ id10285/ id10290/ id10295/ id10300/ id10305/ jennifer_lawrence/
id10271/ id10276/ id10281/ id10286/ id10291/ id10296/ id10301/ id10306/ jimmy_fallon/
id10272/ id10277/ id10282/ id10287/ id10292/ id10297/ id10302/ id10307/
```
Download [funneled LFW](http://vis-www.cs.umass.edu/lfw/lfw-funneled.tgz)
Unzip it to ```${PATH_SHARED}/datasets/lfw_funneled```
Keep folders with 6 ≤ no. imgs ≤ 9
Download [extra faces](https://www.dropbox.com/scl/fo/n2ww24evjg0qqh6gvd7if/AGb05a1S6kl4Ai-cAT4knn8?rlkey=ovjul88jw4hhi1ugv46cwcyha&st=w3ntumhz&dl=0) to that folder
The resulting directory layout should look like this:
```sh
ls ${PATH_SHARED}/datasets/lfw_funneled
Albert_Costa Bob_Stoops Elton_John 'Jimmy Fallon' Leonardo_DiCaprio Monica_Seles Robert_Redford Tony_Stewart
Al_Gore Boris_Becker Fernando_Gonzalez JK_Rowling Leonid_Kuchma Naji_Sabri Robert_Zoellick Tung_Chee-hwa
Ali_Naimi Bulent_Ecevit Fernando_Henrique_Cardoso Joan_Laporta Li_Peng Natalie_Coughlin Romano_Prodi Vaclav_Havel
Al_Sharpton Calista_Flockhart Fujio_Cho John_Abizaid Liza_Minnelli Norm_Coleman Roman_Polanski Valentino_Rossi
Amelia_Vega Cameron_Diaz George_Clooney John_Edwards Li_Zhaoxing Oscar_De_La_Hoya Sarah_Hughes Valery_Giscard_dEstaing
Ana_Guevara Carmen_Electra Gerry_Adams John_Manley Luis_Ernesto_Derbez_Bautista Paula_Radcliffe Sarah_Jessica_Parker Vojislav_Kostunica
Ana_Palacio Celine_Dion Goldie_Hawn John_McCain Marco_Antonio_Barrera Paul_Martin Sheryl_Crow William_Donaldson
Angela_Bassett Cesar_Gaviria Gwyneth_Paltrow John_Travolta Mariah_Carey Paul_McCartney Shimon_Peres William_Ford_Jr
Antonio_Palocci Charlton_Heston Heizo_Takenaka Jonathan_Edwards Maria_Shriver Paul_ONeill Silvan_Shalom Yao_Ming
Arminio_Fraga Christine_Todd_Whitman Hosni_Mubarak Jon_Gruden Martin_Scorsese Pedro_Almodovar Sophia_Loren Yashwant_Sinha
Ben_Affleck Clint_Eastwood Hugh_Grant Jose_Serra Matthew_Perry Ray_Romano Steve_Lavin Yasser_Arafat
'Bill Burr' Colin_Farrell Jan_Ullrich Juan_Pablo_Montoya Michael_Chang Ricardo_Sanchez Steven_Spielberg Yoko_Ono
Bill_Frist "Conan O' Brien" Jean-Pierre_Raffarin Justine_Pasek Michael_Douglas Richard_Armitage Susan_Sarandon Zhu_Rongji
Bill_Graham Costas_Simitis Jelena_Dokic Justin_Timberlake Michelle_Kwan Richard_Virenque Sylvester_Stallone Zinedine_Zidane
Billy_Crystal David_Wells 'Jennifer Lawrence' Kamal_Kharrazi Mike_Krzyzewski Rick_Perry Tariq_Aziz
Binyamin_Ben-Eliezer Dennis_Hastert Jeong_Se-hyun Kate_Hudson Mike_Martz Robert_Blake Thaksin_Shinawatra
Bob_Graham Dennis_Kucinich Jesse_Jackson Kim_Dae-jung Mike_Myers Robert_De_Niro Thomas_OBrien
Bob_Hope Elsa_Zylberstein Jimmy_Carter Larry_Brown Mohamed_ElBaradei Robert_Duvall Tommy_Haas
```
Go to `http://localhost:${MIDDLEWARE_EXTERNAL_PORT}/#/queues/%2F/queue_module_controller`
Username: ```${MIDDLEWARE_USER}```
Password: ```${MIDDLEWARE_PASSWORD}```
Scroll down to **Publish message**
Copy-paste the content of the JSON into **Payload** ![ctrler_payload](../_pics/ctrler_payload.png)
Click **Publish message**
Done!
{
"programme": {
"uid": "2024-03-11_11h05m",
"application": "AIF_v0.1",
"external_id": "Conan_on_TBS",
"job_status": "start",
"process_status": "working",
"images_per_scene": 5,
"spkrec_dataset": [
"datasets",
"voxceleb1",
"test",
"vox1_test_wav"
],
"model_name": "Facenet512",
"detector_backend": "mtcnn",
"distance_metric": "euclidean_l2",
"facrec_dataset": [
"datasets",
"lfw_funneled"
],
"force": "true"
}
}
\ No newline at end of file
{
"programme": {
"uid": "2024-03-12_14h32m",
"application": "AIF_v0.1",
"external_id": "Jennifer_Lawrence_Shares_Her_Most_Embarrassing_Moments",
"job_status": "start",
"process_status": "working",
"images_per_scene": 5,
"spkrec_dataset": [
"datasets",
"voxceleb1",
"test",
"vox1_test_wav"
],
"model_name": "Facenet512",
"detector_backend": "mtcnn",
"distance_metric": "euclidean_l2",
"facrec_dataset": [
"datasets",
"lfw_funneled"
],
"force": "true"
}
}
\ No newline at end of file
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment