A collection of LLM inference providers and models 🐈
# syntax=docker/dockerfile:1

# Minimal runtime image for the prebuilt `fur` binary.
# Pin the base tag — bare `FROM alpine` floats on :latest and makes
# builds non-reproducible (hadolint DL3007).
FROM alpine:3.20

# The binary is built outside this Dockerfile; copy it into the image.
COPY fur /usr/bin/fur

# Don't run as root. Create a system user with no password, no home dir.
# NOTE(review): assumes `fur` needs no root-only capabilities — it binds
# 8080 (unprivileged), so this should be safe; confirm it writes no
# root-owned paths at runtime.
RUN adduser -S -D -H fur
USER fur

# Documentation only (does not publish the port): fur listens on 8080.
EXPOSE 8080

# Exec form so the server is PID 1 and receives SIGTERM from `docker stop`.
CMD ["/usr/bin/fur"]