{
  "name": "mpt-7b-chat",
  "version": "1",
  "creation_timestamp": 1729803394606,
  "last_updated_timestamp": 1729803394606,
  "current_stage": "None",
  "source": "file:///home/isinyaaa/repos/redhat/mlflow-exp/mlruns/554850363279179864/f338317e81ec466ca826bf6add1f350a/artifacts/mpt-7b",
  "run_id": "f338317e81ec466ca826bf6add1f350a",
  "status": "READY"
}
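The listing above is version 1 of the `mpt-7b-chat` model as stored in the Model Registry. Below is a minimal sketch of how an entity like this can be fetched and dumped with the MLflow Python client; the JSON serialization is only an illustration, and the tracking-store configuration is an assumption, not part of the original run.

```python
import json

from mlflow import MlflowClient

# Assumes MLFLOW_TRACKING_URI points at the same tracking store as the listings.
client = MlflowClient()

mv = client.get_model_version(name="mpt-7b-chat", version="1")

# ModelVersion is a plain entity; dump the fields shown in the listing as JSON.
print(json.dumps(
    {
        "name": mv.name,
        "version": mv.version,
        "creation_timestamp": mv.creation_timestamp,
        "last_updated_timestamp": mv.last_updated_timestamp,
        "current_stage": mv.current_stage,
        "source": mv.source,
        "run_id": mv.run_id,
        "status": mv.status,
    },
    indent=2,
))
```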
{
  "registered_model": {
    "name": "mpt-7b-chat",
    "creation_timestamp": 1729803394604,
    "last_updated_timestamp": 1730147973395,
    "latest_versions": [
      {
        "name": "mpt-7b-chat",
        "version": "11",
        "creation_timestamp": 1730147973395,
        "last_updated_timestamp": 1730147973395,
        "current_stage": "None",
        "source": "file:///home/isinyaaa/repos/redhat/mlflow-exp/mlruns/554850363279179864/2bd44562f5ea46688bf00d4235e08aff/artifacts/mpt-7b",
        "run_id": "2bd44562f5ea46688bf00d4235e08aff",
        "status": "READY"
      }
    ]
  }
}
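This block matches the shape of the REST response from `GET /api/2.0/mlflow/registered-models/get`; the same data is reachable through the Python client. A sketch, assuming the tracking URI is already configured:

```python
from mlflow import MlflowClient

client = MlflowClient()  # assumes MLFLOW_TRACKING_URI points at the same store

rm = client.get_registered_model("mpt-7b-chat")
print(rm.name, rm.creation_timestamp, rm.last_updated_timestamp)

# latest_versions holds the newest version for each stage ("None", "Staging", ...),
# which is why only version 11 appears here even though earlier versions exist.
for mv in rm.latest_versions:
    print(mv.version, mv.current_stage, mv.status, mv.source)
```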
{
  "experiment": {
    "experiment_id": "0",
    "name": "Default",
    "artifact_location": "mlflow-artifacts:/0",
    "lifecycle_stage": "active",
    "last_update_time": 1729798254874,
    "creation_time": 1729798254874
  }
}
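The `experiment` block is the server's default experiment (ID `0`, named `Default`); the run record that follows belongs to a different experiment, `554850363279179864`. Fetching an experiment record looks like this (a sketch, under the same client assumptions as above):

```python
from mlflow import MlflowClient

client = MlflowClient()  # assumes MLFLOW_TRACKING_URI points at the same store

exp = client.get_experiment("0")                  # lookup by ID
# exp = client.get_experiment_by_name("Default")  # equivalent lookup by name
print(exp.experiment_id, exp.name, exp.artifact_location, exp.lifecycle_stage)
```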
{
  "run": {
    "info": {
      "run_uuid": "2bd44562f5ea46688bf00d4235e08aff",
      "experiment_id": "554850363279179864",
      "run_name": "languid-cod-120",
      "user_id": "isinyaaa",
      "status": "FINISHED",
      "start_time": 1730147960082,
      "end_time": 1730147973605,
      "artifact_uri": "file:///home/isinyaaa/repos/redhat/mlflow-exp/mlruns/554850363279179864/2bd44562f5ea46688bf00d4235e08aff/artifacts",
      "lifecycle_stage": "active",
      "run_id": "2bd44562f5ea46688bf00d4235e08aff"
    },
    "data": {
      "tags": [
        {
          "key": "mlflow.user",
          "value": "isinyaaa"
        },
        {
          "key": "mlflow.source.name",
          "value": "exp.py"
        },
        {
          "key": "mlflow.source.type",
          "value": "LOCAL"
        },
        {
          "key": "mlflow.runName",
          "value": "languid-cod-120"
        },
        {
          "key": "mlflow.log-model.history",
          "value": "[{\"run_id\": \"2bd44562f5ea46688bf00d4235e08aff\", \"artifact_path\": \"mpt-7b\", \"utc_time_created\": \"2024-10-28 20:39:20.088683\", \"model_uuid\": \"cefd8989b5c245ebb8b130d239dd575d\", \"flavors\": {\"transformers\": {\"transformers_version\": \"4.46.0\", \"code\": null, \"task\": \"text-generation\", \"instance_type\": \"TextGenerationPipeline\", \"framework\": \"pt\", \"torch_dtype\": \"torch.float16\", \"pipeline_model_type\": \"LlamaForCausalLM\", \"source_model_name\": \"meta-llama/Llama-3.2-3B\", \"model_binary\": \"model\", \"tokenizer_type\": \"PreTrainedTokenizerFast\", \"components\": [\"tokenizer\"]}, \"python_function\": {\"loader_module\": \"mlflow.transformers\", \"python_version\": \"3.12.1\", \"env\": {\"conda\": \"conda.yaml\", \"virtualenv\": \"python_env.yaml\"}}}}]"
        }
      ]
    },
    "inputs": {}
  }
}
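The final block is the full run record for `languid-cod-120`: run metadata under `info`, MLflow's bookkeeping tags under `data.tags` (including the `mlflow.log-model.history` entry written when the transformers model was logged), and an empty `inputs` section. Its shape matches the REST response from `GET /api/2.0/mlflow/runs/get`. A sketch of fetching the run and decoding the log-model history, under the same client assumptions:

```python
import json

from mlflow import MlflowClient

client = MlflowClient()  # assumes MLFLOW_TRACKING_URI points at the same store

run = client.get_run("2bd44562f5ea46688bf00d4235e08aff")
print(run.info.run_name, run.info.status, run.info.artifact_uri)

# The log-model history tag is itself a JSON-encoded string; decode it to inspect
# the flavors recorded for the logged model (transformers + python_function here).
history = json.loads(run.data.tags["mlflow.log-model.history"])
flavor = history[0]["flavors"]["transformers"]
print(flavor["source_model_name"], flavor["task"], flavor["torch_dtype"])
```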