```bash
# Set environment variables for the Ollama server
export OLLAMA_HOST=0.0.0.0
export OLLAMA_ORIGINS="*"

# Start the Ollama service in the background
ollama serve &

# Wait for the service to initialize
sleep 10

# Create the model using Ollama
ollama create llama3.2-tunned -f Modelfile
ollama pull llama3.2:1b
ollama pull qwen2.5:0.5b

# Keep the container running indefinitely
tail -f /dev/null
```
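The `ollama create llama3.2-tunned -f Modelfile` step builds the custom model from a Modelfile that must sit next to the script. As a minimal sketch only, assuming the customization is a base model plus a system prompt and a couple of sampling parameters (the base model, parameter values, and prompt below are illustrative assumptions, not the actual file used here):

```bash
# Illustrative Modelfile: the FROM target, parameters, and system prompt
# are assumptions for demonstration, not the real configuration.
cat > Modelfile <<'EOF'
FROM llama3.2:1b
PARAMETER temperature 0.7
PARAMETER num_ctx 2048
SYSTEM "You are a concise, helpful assistant."
EOF

# Build the custom model from the Modelfile
ollama create llama3.2-tunned -f Modelfile
```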
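Because `OLLAMA_HOST=0.0.0.0` and `OLLAMA_ORIGINS="*"` make the server listen on all interfaces and accept cross-origin requests, the running service can be checked over HTTP. A quick sanity check, assuming Ollama's default port 11434 and the models pulled above (the prompt text is just an example):

```bash
# List the models the server currently knows about
curl http://localhost:11434/api/tags

# Send a one-off, non-streaming prompt to one of the pulled models
curl http://localhost:11434/api/generate -d '{
  "model": "qwen2.5:0.5b",
  "prompt": "Say hello in one sentence.",
  "stream": false
}'
```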